def get_data(self, name):
    """
    Fetch the attachment's binary contents (or only its size) from S3
    instead of the local filesystem.

    The S3 object name is built as '<DBNAME>/<FILENAME>' inside the
    configured bucket.

    :param name: name of the field being read
    :return: bytearray of the file binary, an int size when only the
        size is requested, or None when the record has no digest
    """
    # Without an 'attachment_s3' config section, defer to the standard
    # filesystem-backed implementation.
    if not config.has_section('attachment_s3'):
        return super(Attachment, self).get_data(name)

    access_key = config.get('attachment_s3', 'access_key')
    secret_key = config.get('attachment_s3', 'secret_key')
    bucket = S3Connection(access_key, secret_key).get_bucket(
        config.get('attachment_s3', 'bucket_name'))

    transaction = Transaction()
    db_name = transaction.database.name
    # Pop (not get): the '<model>.<field>' context hint is consumed here.
    format_ = transaction.context.pop('%s.%s' % (self.__name__, name), '')

    size_only = name == 'data_size' or format_ == 'size'
    value = 0 if size_only else None
    if self.digest:
        filename = self.digest
        if self.collision:
            filename += '-' + str(self.collision)
        filename = '/'.join([db_name, filename])
        if size_only:
            # Only report a size when the object actually exists.
            key = bucket.lookup(filename)
            if key is not None:
                value = key.size
        else:
            key = Key(bucket)
            key.key = filename
            try:
                value = bytearray(key.get_contents_as_string())
            except S3ResponseError:
                self.raise_user_error(
                    "no_such_key", error_args=(self.name, filename)
                )
    return value
def get_data(self, name):
    """
    Get the data from S3 instead of the filesystem.

    The filename is built as '<DBNAME>/<FILENAME>' in the given S3 bucket.

    :param name: name of the field being read
    :return: bytearray of the file binary, an int size when only the
        size is requested, or None when the record has no digest
    """
    # Without an 'attachment_s3' config section, defer to the standard
    # filesystem-backed implementation.
    if not config.has_section('attachment_s3'):
        return super(Attachment, self).get_data(name)
    s3_conn = S3Connection(config.get('attachment_s3', 'access_key'),
        config.get('attachment_s3', 'secret_key'))
    bucket = s3_conn.get_bucket(config.get('attachment_s3', 'bucket_name'))
    db_name = Transaction().database.name
    # Pop (not get): the '<model>.<field>' context hint is consumed here.
    format_ = Transaction().context.pop('%s.%s' % (self.__name__, name), '')
    value = None
    if name == 'data_size' or format_ == 'size':
        value = 0
    if self.digest:
        filename = self.digest
        if self.collision:
            filename = filename + '-' + str(self.collision)
        filename = "/".join([db_name, filename])
        if name == 'data_size' or format_ == 'size':
            key = bucket.lookup(filename)
            if key is not None:
                # Get the size only if bucket has the key.
                value = key.size
        else:
            k = Key(bucket)
            k.key = filename
            try:
                # BUGFIX: the original used buffer(), a Python 2-only
                # builtin removed in Python 3.  bytearray() works on
                # both and matches the sibling get_data implementation.
                value = bytearray(k.get_contents_as_string())
            except S3ResponseError:
                self.raise_user_error("no_such_key",
                    error_args=(self.name, filename))
    return value
def setUp(self):
    """Override marketing e-mail config with test values, restoring after.

    Saves each current configuration value, replaces it with the
    module-level test constant, and registers a cleanup that puts the
    original back so no configuration change leaks between tests.
    """
    super().setUp()
    if not config.has_section('marketing'):
        config.add_section('marketing')
    subscribe_url = config.get(
        'marketing', 'email_subscribe_url', default='')
    config.set('marketing', 'email_subscribe_url', SUBSCRIBE_URL)
    self.addCleanup(lambda: config.set(
            'marketing', 'email_subscribe_url', subscribe_url))
    unsubscribe_url = config.get(
        'marketing', 'email_unsubscribe_url', default='')
    config.set('marketing', 'email_unsubscribe_url', UNSUBSCRIBE_URL)
    self.addCleanup(lambda: config.set(
            'marketing', 'email_unsubscribe_url', unsubscribe_url))
    spy_pixel = config.get('marketing', 'email_spy_pixel', default='')
    config.set('marketing', 'email_spy_pixel', 'true')
    self.addCleanup(
        lambda: config.set('marketing', 'email_spy_pixel', spy_pixel))
    # BUGFIX: guard the 'email' section like 'marketing' above —
    # config.get(..., default=) tolerates a missing section but
    # config.set() raises NoSectionError on one.  (If trytond always
    # pre-creates 'email' this guard is a harmless no-op.)
    if not config.has_section('email'):
        config.add_section('email')
    from_ = config.get('email', 'from', default='')
    config.set('email', 'from', FROM)
    self.addCleanup(lambda: config.set('email', 'from', from_))
return None, None return loader app = TrytondWSGI() if config.get('web', 'root'): static_files = { '/': config.get('web', 'root'), } app.wsgi_app = SharedDataMiddlewareIndex( app.wsgi_app, static_files, cache_timeout=config.getint('web', 'cache_timeout')) num_proxies = config.getint('web', 'num_proxies') if num_proxies: app.wsgi_app = NumProxyFix(app.wsgi_app, num_proxies) if config.has_section('wsgi middleware'): for middleware in config.options('wsgi middleware'): Middleware = resolve(config.get('wsgi middleware', middleware)) args, kwargs = (), {} section = 'wsgi %s' % middleware if config.has_section(section): if config.has_option(section, 'args'): args = eval(config.get(section, 'args')) if config.has_option(section, 'kwargs'): kwargs = eval(config.get(section, 'kwargs')) app.wsgi_app = Middleware(app.wsgi_app, *args, **kwargs) import trytond.protocols.dispatcher # noqa: E402,F401 import trytond.bus # noqa: E402,F401
def set_data(cls, attachments, name, value):
    """
    Save the attachment to S3 instead of the filesystem

    :param attachments: List of ir.attachment instances
    :param name: name of the field
    :param value: binary data of the attachment (string)
    """
    # Without an 'attachment_s3' config section, defer to the standard
    # filesystem-backed implementation.
    if not config.has_section('attachment_s3'):
        return super(Attachment, cls).set_data(attachments, name, value)
    s3_conn = S3Connection(config.get('attachment_s3', 'access_key'),
        config.get('attachment_s3', 'secret_key'))
    bucket = s3_conn.get_bucket(config.get('attachment_s3', 'bucket_name'))
    if value is None:
        return
    cursor = Transaction().connection.cursor()
    db_name = Transaction().database.name
    # hashlib vs md5 fallback — presumably a module-level try/except
    # import for very old Pythons where md5 was its own module; TODO
    # confirm against the file's imports.
    if hashlib:
        digest = hashlib.md5(value).hexdigest()
    else:
        digest = md5.new(value).hexdigest()
    # Objects are stored under '<DBNAME>/<DIGEST>' (plus a
    # '-<collision>' suffix when distinct contents share a digest).
    filename = "/".join([db_name, digest])
    collision = 0
    if bucket.get_key(filename):
        key2 = Key(bucket)
        key2.key = filename
        data2 = key2.get_contents_as_string()
        if value != data2:
            # Same digest, different contents: an MD5 collision.  Scan
            # the collision slots already recorded in the database for
            # one whose stored object matches this value.
            cursor.execute(
                'SELECT DISTINCT(collision) '
                'FROM ir_attachment '
                'WHERE digest = %s '
                'AND collision != 0 '
                'ORDER BY collision', (digest, ))
            collision2 = 0
            for row in cursor.fetchall():
                collision2 = row[0]
                filename = "/".join(
                    [db_name, digest + '-' + str(collision2)])
                if bucket.get_key(filename):
                    key2 = Key(bucket)
                    key2.key = filename
                    data2 = key2.get_contents_as_string()
                    if value == data2:
                        # Identical contents already stored under this
                        # collision suffix; reuse it without uploading.
                        collision = collision2
                        break
            if collision == 0:
                # No matching slot: allocate the next collision number
                # and upload under '<digest>-<collision>'.
                collision = collision2 + 1
                filename = "/".join(
                    [db_name, digest + '-' + str(collision)])
                key = Key(bucket)
                key.key = filename
                key.set_contents_from_string(value[:])
        # else: identical contents already stored under the plain
        # digest name — nothing to upload, collision stays 0.
    else:
        # First time this digest is seen: plain upload.
        key = Key(bucket)
        key.key = filename
        key.set_contents_from_string(value[:])
    # Record where the contents live so get_data can rebuild the name.
    cls.write(attachments, {
        'digest': digest,
        'collision': collision,
        })
def set_data(cls, attachments, name, value):
    """
    Store the attachment contents in S3 rather than the filesystem.

    The object name is '<DBNAME>/<DIGEST>' in the configured bucket,
    with a '-<collision>' suffix when different contents share the same
    MD5 digest.  Every attachment record is then updated with the
    digest/collision pair so it can be located again.

    :param attachments: List of ir.attachment instances
    :param name: name of the field
    :param value: binary data of the attachment (string)
    """
    # Without an 'attachment_s3' config section, defer to the standard
    # filesystem-backed implementation.
    if not config.has_section('attachment_s3'):
        return super(Attachment, cls).set_data(attachments, name, value)

    connection = S3Connection(
        config.get('attachment_s3', 'access_key'),
        config.get('attachment_s3', 'secret_key')
    )
    bucket = connection.get_bucket(
        config.get('attachment_s3', 'bucket_name'))
    if value is None:
        return

    cursor = Transaction().connection.cursor()
    db_name = Transaction().database.name
    if hashlib:
        digest = hashlib.md5(value).hexdigest()
    else:
        digest = md5.new(value).hexdigest()
    filename = "/".join([db_name, digest])
    collision = 0

    if not bucket.get_key(filename):
        # First time this digest is seen: plain upload.
        fresh_key = Key(bucket)
        fresh_key.key = filename
        fresh_key.set_contents_from_string(value[:])
    else:
        existing = Key(bucket)
        existing.key = filename
        if value != existing.get_contents_as_string():
            # Same digest, different contents: an MD5 collision.  Look
            # through the collision slots recorded in the database for
            # one whose stored object matches; otherwise allocate the
            # next free slot and upload there.
            cursor.execute(
                'SELECT DISTINCT(collision) '
                'FROM ir_attachment '
                'WHERE digest = %s '
                'AND collision != 0 '
                'ORDER BY collision', (digest,))
            last_slot = 0
            for (slot,) in cursor.fetchall():
                last_slot = slot
                filename = "/".join([
                    db_name, digest + '-' + str(slot)
                ])
                if bucket.get_key(filename):
                    candidate = Key(bucket)
                    candidate.key = filename
                    if value == candidate.get_contents_as_string():
                        # Identical contents already stored here;
                        # reuse this slot without uploading.
                        collision = slot
                        break
            if collision == 0:
                collision = last_slot + 1
                filename = "/".join([
                    db_name, digest + '-' + str(collision)
                ])
                fresh_key = Key(bucket)
                fresh_key.key = filename
                fresh_key.set_contents_from_string(value[:])
        # else: identical contents already stored under the plain
        # digest name — nothing to upload, collision stays 0.

    # Record where the contents live so get_data can rebuild the name.
    cls.write(attachments, {
        'digest': digest,
        'collision': collision,
        })