def create_file(self, file):
    """Create a DB record for *file* and upload its content to object storage.

    Args:
        file: dict with at least 'folder_id', 'name' and 'content' keys.

    Returns:
        The serialized record on success, the string 'file already exists'
        on a duplicate name, or None when the upload fails / raises IOError
        (the DB transaction is rolled back in both failure cases).

    Raises:
        IOError: when validation fails.
    """
    # PEP 8 idiom fix: identity test instead of `== False`; still only
    # triggers on an explicit False return, so other falsy results
    # (None, 0) behave exactly as before.
    if self.validate_file(file, action='create') is False:
        raise IOError('validation failed')
    folder = self.session.query(Folder).filter_by(id=file.get('folder_id'), user_id=self.uid).first()
    uuid = get_uuid(str(self.uid), folder.path, file['name'])
    # NOTE(review): assumes file['content'] is text representable in
    # ISO-8859-1 and never None — confirm against callers.
    content = file.get('content').encode("ISO-8859-1")
    # Reject duplicate names within the target folder.
    child_files = self.read_folder(folder.path)['files']
    for entry in child_files:
        if file['name'] == entry['name']:
            return 'file already exists'
    now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    file_rec = File(folder_id=folder.id, name=file['name'], user_id=self.uid,
                    location=uuid, type=self.storage_type, deleted=False,
                    ctime=now)
    try:
        upload_result = self.storage.create_object(content=content, uuid=uuid)
        if upload_result:
            self.session.add(file_rec)
            self.session.commit()
            return file_rec.serialize
        else:
            self.session.rollback()
    except IOError:
        # Best-effort: discard the pending row; caller sees None.
        self.session.rollback()
def get_logo_id_new(logo_url, download_crawler, source, sourceId, catename):
    """Download a logo image, convert it, and store it in OSS under a new uuid.

    Args:
        logo_url: source image URL (may be None/blank → nothing is stored).
        download_crawler: helper exposing get_image_size_new(url).
        source: numeric source id; selects the conversion size policy.
        sourceId, catename: unused here (kept for interface compatibility).

    Returns:
        (name, width, height) — *name* is the OSS object key (None when no
        image was downloaded); width/height are the final image dimensions.
    """
    # NOTE(review): this connection is opened but never closed (the close
    # call was commented out upstream) — confirm whether connect_mongo()
    # pools/It is safe to leak here.
    mongo = db.connect_mongo()
    name = None
    height = None
    width = None
    if logo_url is not None and len(logo_url.strip()) > 0:
        logger.info("Download logo: %s", logo_url)
        (image_file, width, height) = download_crawler.get_image_size_new(logo_url)
        if image_file is not None:
            name = util.get_uuid()
            logger.info("%s->%s|%s", logo_url, name, image_file)
            if source in [13835, 13836, 13613]:
                img, width, height = util.convert_image(image_file, name, size=1024)
            elif source in [13803]:
                # BUG FIX: 13613 was also listed in this membership test,
                # but the branch above already matches it, so it could
                # never reach this arm — removed the dead entry.
                img, width, height = util.convert_image(image_file, name, size=width)
            else:
                img, width, height = util.convert_image(image_file, name)
            headers = {"Content-Type": "image/jpeg"}
            oss2put.put(name, img, headers=headers)
    return (name, width, height)
def _postMetrics(self):
    """Flush buffered metrics as one MetricTransaction, then reset the buffer."""
    # Nothing buffered → nothing to do.
    if not self._metrics:
        return
    body = self._metrics
    body['uuid'] = get_uuid()
    body['internalHostname'] = gethostname(self._agentConfig)
    body['apiKey'] = self._agentConfig['api_key']
    MetricTransaction(body, {})
    self._metrics = {}
def _postMetrics(self):
    """Serialize buffered metrics to JSON, submit them, and clear the buffer."""
    # Early exit when the buffer is empty.
    if not self._metrics:
        return
    body = self._metrics
    body['uuid'] = get_uuid()
    body['internalHostname'] = get_hostname(self._agentConfig)
    body['apiKey'] = self._agentConfig['api_key']
    MetricTransaction(json.dumps(body),
                      headers={'Content-Type': 'application/json'})
    self._metrics = {}
def _postMetrics(self):
    """Post accumulated metrics (if any) as JSON, then reset the accumulator."""
    if len(self._metrics) == 0:
        return
    # Stamp the payload with identity fields before shipping it.
    self._metrics.update({
        "uuid": get_uuid(),
        "internalHostname": get_hostname(self._agentConfig),
        "apiKey": self._agentConfig["api_key"],
    })
    MetricTransaction(json.dumps(self._metrics),
                      headers={"Content-Type": "application/json"})
    self._metrics = {}
def add(new_drink):
    """Append *new_drink* (stamped with a fresh uuid 'id') to the JSON store."""
    with open(JSON_FILE_PATH, "r+") as fp:
        drinks = json.load(fp)
        # Rewind and clear so the rewritten list replaces the old content.
        fp.seek(0)
        fp.truncate()
        new_drink['id'] = util.get_uuid()
        drinks.append(new_drink)
        json.dump(drinks, fp)
        return True
def upload_files():
    """Handle a training-image upload: save files, align faces, train a classifier.

    Flow (POST only): save uploads under UPLOAD_FOLDER/<name>, align the
    dataset into a temp dir, copy aligned images to the train-align folder,
    train a classifier pickle for <name>, then render index.html with the
    aligned image paths.
    """
    if request.method == 'POST':
        uploaded_files = request.files.getlist('files[]')
        name = request.form['name']
        path = os.path.join(app.config['UPLOAD_FOLDER'], name)
        if not os.path.exists(path):
            os.makedirs(path)
        for f in uploaded_files:
            f.save(os.path.join(path, f.filename))
        # Unique scratch dirs so concurrent uploads don't collide.
        des = 'tmp/%s' % util.get_uuid()
        src = 'tmp/%s' % util.get_uuid()
        dirpath = os.path.join(app.config['UPLOAD_FOLDER'], name)
        shutil.copytree(dirpath, os.path.join(src, name))
        # NOTE(review): argument meanings (0.25, True, 32, 160) are
        # presumably margin/random-order/batch/image-size — confirm against
        # net.align_dataset's signature.
        net.align_dataset(des, src, 0.25, True, 32, 160)
        shutil.copytree(
            os.path.join(des, name),
            os.path.join(app.config['UPLOAD_TRAIN_ALIGN_FOLDER'], name))
        shutil.rmtree(src)
        shutil.rmtree(des)
        shutil.copytree(
            os.path.join(app.config['UPLOAD_TRAIN_ALIGN_FOLDER'], name),
            os.path.join('tmp', name))
        res = net.train(False, 'TRAIN', 'tmp', 20, 10,
                        '20170512-110547/20170512-110547.pb',
                        'classifiers/%s_classifier.pkl' % name, 1000, 160)
        shutil.rmtree(os.path.join('tmp', name))
        img_list = [os.path.join(path, f.filename) for f in uploaded_files]
        img_align_path = os.path.join(app.config['UPLOAD_TRAIN_ALIGN_FOLDER'], name)
        # Aligned outputs are written as .png regardless of input extension.
        img_align_list = [
            os.path.join(
                os.path.join(img_align_path, f.filename.split('.')[0] + '.png'))
            for f in uploaded_files
        ]
        return render_template('index.html', img_align_list=img_align_list, name=name)
def _build_payload(self, start_event=True):
    """Return a dictionary that contains all of the generic payload data.

    Args:
        start_event: when True and this is the first run, include system
            stats and an 'Agent Startup' event in the payload.
    """
    now = time.time()
    payload = {
        'collection_timestamp': now,
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'service_checks': [],
        'resources': {},
        'internalHostname': get_hostname(self.agentConfig),
        'uuid': get_uuid(),
        'host-tags': {},
    }
    # Include system stats on first postback
    if start_event and self._is_first_run():
        payload['systemStats'] = self.agentConfig.get('system_stats', {})
        # Also post an event in the newsfeed
        payload['events']['System'] = [{'api_key': self.agentConfig['api_key'],
                                        'host': payload['internalHostname'],
                                        'timestamp': now,
                                        'event_type': 'Agent Startup',
                                        'msg_text': 'Version %s' % get_version()
                                        }]
    # Periodically send the host metadata.
    if self._is_first_run() or self._should_send_metadata():
        payload['systemStats'] = get_system_stats()
        payload['meta'] = self._get_metadata()
        self.metadata_cache = payload['meta']
    # Add static tags from the configuration file
    host_tags = []
    if self.agentConfig['tags'] is not None:
        host_tags.extend([unicode(tag.strip()) for tag in self.agentConfig['tags'].split(",")])
    if self.agentConfig['collect_ec2_tags']:
        host_tags.extend(EC2.get_tags())
    if host_tags:
        payload['host-tags']['system'] = host_tags
    # GCE tags are keyed by the cloud provider's source-type name.
    GCE_tags = GCE.get_tags()
    if GCE_tags is not None:
        payload['host-tags'][GCE.SOURCE_TYPE_NAME] = GCE_tags
    # Log the metadata on the first run
    if self._is_first_run():
        log.info("Hostnames: %s, tags: %s" % (repr(self.metadata_cache), payload['host-tags']))
    return payload
def serialize_metrics(metrics, hostname, ip):
    """Serialize *metrics* to JSON with progressive fallback on bad unicode.

    Appends a serialization-status marker metric to the series, then tries:
    (1) plain dump, (2) dump after unicode_metrics() character replacement,
    (3) a minimal 'permanent_failure' payload.

    Returns:
        (serialized, headers) — serialized is deflate-compressed (Python 2
        str) when larger than COMPRESS_THRESHOLD, and headers carry the
        matching Content-Encoding.
    """
    try:
        metrics.append(add_serialization_status_metric("success", hostname))
        serialized = json.dumps({
            "series": metrics,
            'uuid': get_uuid(),
            'ip': ip
        })
    except UnicodeDecodeError as e:
        log.exception(
            "Unable to serialize payload. Trying to replace bad characters. %s", e)
        metrics.append(add_serialization_status_metric("failure", hostname))
        try:
            log.error(metrics)
            serialized = json.dumps({
                "series": unicode_metrics(metrics),
                'uuid': get_uuid(),
                'ip': ip
            })
        except Exception as e:
            # Last resort: ship only the failure marker so the backend
            # still learns that serialization broke.
            log.exception("Unable to serialize payload. Giving up. %s", e)
            serialized = json.dumps({
                "series": [
                    add_serialization_status_metric("permanent_failure", hostname)
                ],
                'uuid': get_uuid(),
                'ip': ip
            })
    # NOTE(review): zlib.compress on a text result is Python-2-only
    # (json.dumps returns str there); py3 would need .encode() first.
    if len(serialized) > COMPRESS_THRESHOLD:
        headers = {
            'Content-Type': 'application/json',
            'Content-Encoding': 'deflate'
        }
        serialized = zlib.compress(serialized)
    else:
        headers = {'Content-Type': 'application/json'}
    return serialized, headers
def __init__(self):
    """Load agent configuration and derive runtime paths/session state."""
    conf = get_config()
    # Polling cadence in seconds; 120 when not configured.
    self.request_interval = conf.get("request_interval", 120)
    self.requests = requests.session()
    self.uuid = get_uuid()
    self.apikey = conf.get("api_key")
    self.post_param = {"apikey": self.apikey}
    self.urls = self.init_url(conf)
    # 'run' directory lives next to the conf.d directory.
    self.run_path = os.path.join(os.path.dirname(get_confd_path()), 'run')
    # scripts directory mirrors the checks.d layout.
    self.script_path = get_checksd_path().replace("checks.d", "scripts")
    self.need_restart = False
def _build_payload(self, start_event=True):
    """Return a dictionary that contains all of the generic payload data.

    Args:
        start_event: when True and this is the first run, include system
            stats and an 'Agent Startup' event.
    """
    now = time.time()
    payload = {
        "collection_timestamp": now,
        "os": self.os,
        "python": sys.version,
        "agentVersion": self.agentConfig["version"],
        "apiKey": self.agentConfig["api_key"],
        "events": {},
        "metrics": [],
        "resources": {},
        "internalHostname": get_hostname(self.agentConfig),
        "uuid": get_uuid(),
        "host-tags": {},
    }
    # Include system stats on first postback
    if start_event and self._is_first_run():
        payload["systemStats"] = self.agentConfig.get("system_stats", {})
        # Also post an event in the newsfeed
        payload["events"]["System"] = [
            {
                "api_key": self.agentConfig["api_key"],
                "host": payload["internalHostname"],
                "timestamp": now,
                "event_type": "Agent Startup",
                "msg_text": "Version %s" % get_version(),
            }
        ]
    # Periodically send the host metadata.
    if self._is_first_run() or self._should_send_metadata():
        payload["systemStats"] = get_system_stats()
        payload["meta"] = self._get_metadata()
        self.metadata_cache = payload["meta"]
    # Add static tags from the configuration file
    host_tags = []
    if self.agentConfig["tags"] is not None:
        host_tags.extend([unicode(tag.strip()) for tag in self.agentConfig["tags"].split(",")])
    if self.agentConfig["collect_ec2_tags"]:
        host_tags.extend(EC2.get_tags())
    if host_tags:
        payload["host-tags"]["system"] = host_tags
    # Log the metadata on the first run
    if self._is_first_run():
        log.info(u"Hostnames: %s, tags: %s" % (repr(self.metadata_cache), payload["host-tags"]))
    return payload
def _build_payload(self, start_event=True):
    """Return a dictionary that contains all of the generic payload data.

    Args:
        start_event: when True and this is the first run, include system
            stats and an 'Agent Startup' event.
    """
    now = time.time()
    payload = {
        'collection_timestamp': now,
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'resources': {},
        'internalHostname': get_hostname(self.agentConfig),
        'uuid': get_uuid(),
    }
    # Include system stats on first postback
    if start_event and self._is_first_run():
        payload['systemStats'] = self.agentConfig.get('system_stats', {})
        # Also post an event in the newsfeed
        payload['events']['System'] = [{
            'api_key': self.agentConfig['api_key'],
            'host': payload['internalHostname'],
            'timestamp': now,
            'event_type': 'Agent Startup',
            'msg_text': 'Version %s' % get_version()
        }]
    # Periodically send the host metadata.
    if self._is_first_run() or self._should_send_metadata():
        payload['systemStats'] = get_system_stats()
        payload['meta'] = self._get_metadata()
        self.metadata_cache = payload['meta']
    # Add static tags from the configuration file
    if self.agentConfig['tags'] is not None:
        payload['tags'] = self.agentConfig['tags']
    # Log the metadata on the first run
    if self._is_first_run():
        if self.agentConfig['tags'] is not None:
            log.info(u"Hostnames: %s, tags: %s" \
                % (repr(self.metadata_cache), self.agentConfig['tags']))
        else:
            log.info(u"Hostnames: %s" % repr(self.metadata_cache))
    return payload
def add(new_drink):
    """Add *new_drink* unless its pump is taken; return the stored record.

    Returns False when another drink already uses the same pump_number,
    otherwise the persisted record (with its freshly assigned uuid 'id').
    """
    with open(JSON_FILE_PATH, "r+") as fp:
        drinks = json.load(fp)
        # One drink per pump: refuse duplicates.
        if any(d['pump_number'] == new_drink['pump_number'] for d in drinks):
            return False
        fp.seek(0)
        fp.truncate()
        new_drink['id'] = util.get_uuid()
        drinks.append(new_drink)
        json.dump(drinks, fp)
        # Return the record as it exists in the persisted list.
        for d in drinks:
            if d['id'] == new_drink['id']:
                return d
def submit_service_checks(self, service_checks):
    """Stamp each check with this agent's uuid/ip and POST the whole batch."""
    headers = {'Content-Type': 'application/json'}
    params = {}
    if self.api_key:
        params['api_key'] = self.api_key
    # Identity fields are added in place to every check dict.
    for check in service_checks:
        check.update({'uuid': get_uuid(), 'ip': self.ip})
    url = '{0}/api/v1/check_run?{1}'.format(self.api_host, urlencode(params))
    self.submit_http(url, json.dumps(service_checks), headers)
def _postAgentInfoToServer(self):
    """POST this agent's identity/config inventory to the central server.

    Throttled: every 12th call (including the first, counter==0) is
    skipped. Collects hostname, tags, uuid and the list of configured
    check yaml files, then posts them as JSON; errors are logged only.
    """
    import json
    import time
    import requests
    import platform
    # Skip when the counter is a multiple of 12 (still advance it).
    if not self._send_controler % 12:
        self._send_controler += 1
        return
    self._send_controler += 1
    hostname = get_hostname(self._agentConfig)
    if self._agentConfig['tags']:
        tags = self._agentConfig['tags'].split(',')
    else:
        tags = []
    hostid = get_uuid()
    # Rewrite the gateway URL into the info-intake endpoint.
    url = self._agentConfig['m_url'].replace('api/v2/gateway/dd-agent', 'api/v2/agent/info/intake?api_key=') + \
        self._agentConfig['api_key']
    modified = time.mktime(time.localtime(time.time()))
    path = ''
    if platform.system() == 'Linux':
        path = _unix_confd_path()
    elif platform.system() == 'Windows':
        path = _windows_confd_path()
    if path:
        # Python 2 idiom: first os.walk() tuple, files component.
        file_list = os.walk(path).next()[2]
        apps = [
            f for f in file_list
            if f.endswith(".yaml") or f.endswith(".yaml.default")
        ]
    # NOTE(review): `apps` is unbound when path is empty (non-Linux/Windows
    # platform) — the post_data build below would raise NameError; confirm
    # whether that platform set is exhaustive in practice.
    post_data = {
        "id": str(hostid),
        "hostname": hostname,
        "ip": self.ip,
        "tags": tags,
        "apps": apps,
        "source": "agent",
        "modified": int(modified),
    }
    try:
        headers = {"content-type": "application/json"}
        # verify=False: TLS verification deliberately disabled upstream.
        r = requests.post(url=url, data=json.dumps(post_data),
                          headers=headers, verify=False)
        log.debug("http return code: %s" % str(r.status_code))
    except Exception, e:
        log.error(
            "Uncaught exception on self._postAgentInfoToServer: %s" % e)
def run():
    """Worker loop: render pending data_report tasks to xlsx and upload to OSS.

    Polls mongo for up to 10 tasks with processStatus==0, generates the
    Excel report for each, uploads it to the 'xiniudata-report' bucket,
    marks the task done with its public link, then sleeps 30s and repeats.
    Never returns.
    """
    collection = mongo.task.data_report
    while True:
        items = collection.find({'processStatus': 0}).limit(10)
        for item in items:
            try:
                startDate, endDate = item['param']['startDate'], item['param'][
                    'endDate']
                logger.info('processing %s ~ %s.xlsx' % (startDate, endDate))
                df, columns = data_code.run2(conn, mongo, startDate=startDate,
                                             endDate=endDate, param=item['param'])
                # Scratch file in the script directory; overwritten per task.
                df.to_excel('test.xlsx', index=0, columns=columns, encoding="utf-8")
                path = os.path.join(sys.path[0], 'test.xlsx')
                fileid = util.get_uuid()
                oss = oss2_helper.Oss2Helper("xiniudata-report")
                # Python 2 builtin file() — py3 would use open().
                fp = file(path, "rb")
                oss.put(
                    fileid, fp,
                    headers={
                        "Content-Type": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
                        "x-oss-meta-filename": 'funding_news_report_%s~%s.xlsx' % (startDate, endDate)
                    })
                fp.close()
                logger.info('uploaded funding_news_report_%s ~ %s.xlsx' % (startDate, endDate))
                collection.update_one({'_id': item['_id']}, {
                    '$set': {
                        'processStatus': 1,
                        'link': 'http://www.xiniudata.com/file/report/%s' % fileid
                    }
                })
            except Exception as e:
                # Best-effort: a failed task is logged and left at status 0
                # so it will be retried on the next poll.
                logger.info(e)
        logger.info('sleep')
        time.sleep(30)
def _build_payload(self, start_event=True):
    """Return a dictionary that contains all of the generic payload data.

    Args:
        start_event: when True and this is the first run, include system
            stats and an 'Agent Startup' event.
    """
    now = time.time()
    payload = {
        'collection_timestamp': now,
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'resources': {},
        'internalHostname': get_hostname(self.agentConfig),
        'uuid': get_uuid(),
    }
    # Include system stats on first postback
    if start_event and self._is_first_run():
        payload['systemStats'] = self.agentConfig.get('system_stats', {})
        # Also post an event in the newsfeed
        payload['events']['System'] = [{'api_key': self.agentConfig['api_key'],
                                        'host': payload['internalHostname'],
                                        'timestamp': now,
                                        'event_type': 'Agent Startup',
                                        'msg_text': 'Version %s' % get_version()
                                        }]
    # Periodically send the host metadata.
    if self._is_first_run() or self._should_send_metadata():
        payload['systemStats'] = get_system_stats()
        payload['meta'] = self._get_metadata()
        self.metadata_cache = payload['meta']
    # Add static tags from the configuration file
    if self.agentConfig['tags'] is not None:
        payload['tags'] = self.agentConfig['tags']
    # Log the metadata on the first run
    if self._is_first_run():
        if self.agentConfig['tags'] is not None:
            log.info(u"Hostnames: %s, tags: %s" \
                % (repr(self.metadata_cache), self.agentConfig['tags']))
        else:
            log.info(u"Hostnames: %s" % repr(self.metadata_cache))
    return payload
def _build_payload(self, payload):
    """Fill *payload* in place with the agent's generic envelope fields."""
    # Single update preserves the original key insertion order.
    payload.update({
        'ip': self.ip,
        'collection_timestamp': time.time(),
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agent_config['version'],
        'apiKey': self.agent_config['api_key'],
        'events': {},
        'metrics': [],
        'service_checks': [],
        'resources': {},
        'internalHostname': self.hostname,
        'uuid': get_uuid(),
        'host-tags': {},
        'external_host_tags': {},
    })
def submit_events(self, events):
    """POST *events* to the intake endpoint in chunks of event_chunk_size."""
    headers = {"Content-Type": "application/json"}
    # api_key goes both in the body and the query string when present.
    params = {"api_key": self.api_key} if self.api_key else {}
    for chunk in chunks(events, self.event_chunk_size):
        body = {
            "apiKey": self.api_key,
            "events": {"api": chunk},
            "uuid": get_uuid(),
            "internalHostname": get_hostname(),
        }
        url = "%s/intake?%s" % (self.api_host, urlencode(params))
        self.submit_http(url, json.dumps(body), headers)
def _build_payload(self, payload):
    """Seed *payload* in place with the generic fields every payload carries."""
    # One dict literal keeps the field order identical to the old
    # field-by-field assignments.
    payload.update({
        'collection_timestamp': time.time(),
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'service_checks': [],
        'resources': {},
        'internalHostname': self.hostname,
        'uuid': get_uuid(),
        'host-tags': {},
        'external_host_tags': {},
    })
def submit_events(self, events):
    """Ship *events* to /intake, chunked to event_chunk_size per request."""
    headers = {'Content-Type':'application/json'}
    for chunk in chunks(events, self.event_chunk_size):
        body = json.dumps({
            'apiKey': self.api_key,
            'events': {'api': chunk},
            'uuid': get_uuid(),
            'internalHostname': get_hostname()
        })
        query = {'api_key': self.api_key} if self.api_key else {}
        target = '%s/intake?%s' % (self.api_host, urlencode(query))
        self.submit_http(target, body, headers)
def _build_payload(self):
    """Return a dictionary that contains all of the generic payload data."""
    payload = {
        'collection_timestamp': time.time(),
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'resources': {},
        'internalHostname': gethostname(self.agentConfig),
        'uuid': get_uuid(),
    }
    # Include system stats on first postback
    if self._is_first_run():
        payload['systemStats'] = self.agentConfig.get('systemStats', {})
        # Also post an event in the newsfeed
        payload['events']['System'] = [{
            'api_key': self.agentConfig['api_key'],
            'host': payload['internalHostname'],
            'timestamp': int(time.mktime(datetime.datetime.now().timetuple())),
            'event_type': 'Agent Startup',
            'msg_text': 'Version %s' % get_version()
        }]
    # Periodically send the host metadata.
    if self._is_first_run() or self._should_send_metadata():
        payload['meta'] = self._get_metadata()
    # Add static tags from the configuration file
    if self.agentConfig['tags'] is not None:
        payload['tags'] = self.agentConfig['tags']
    return payload
def _build_payload(self, payload):
    """Populate *payload* in place with the generic skeleton fields."""
    payload.update({
        "collection_timestamp": time.time(),
        "os": self.os,
        "python": sys.version,
        "agentVersion": self.agentConfig["version"],
        "apiKey": self.agentConfig["api_key"],
        "events": {},
        "metrics": [],
        "service_checks": [],
        "resources": {},
        "internalHostname": self.hostname,
        "uuid": get_uuid(),
        "host-tags": {},
        "external_host_tags": {},
    })
def _build_payload(self, payload):
    """Fill *payload* in place with generic fields plus the machine type."""
    payload.update({
        'ip': self.ip,
        'collection_timestamp': time.time(),
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'service_checks': [],
        'resources': {},
        'internalHostname': self.hostname,
        'uuid': get_uuid(),
        'machine_type': get_machine_type(),
    })
    # Identity details are logged right after they are computed.
    log.info('UUID: {0}'.format(payload['uuid']))
    log.info('Machine_Type: {0}'.format(payload['machine_type']))
    payload['host-tags'] = {}
    payload['external_host_tags'] = {}
def create_file(self, folder_id, file_name):
    """Create a sheepdog-backed File record named *file_name* in *folder_id*.

    Returns 'file already exists' on a duplicate name; otherwise commits
    the record when the storage upload succeeds, or rolls back when it
    fails (returning None in both of those cases).
    """
    folder = self.session.query(Folder).filter_by(id=folder_id, user_id=self.uid).first()
    uuid = get_uuid(folder.path, file_name)
    # Duplicate names within the folder are rejected up front.
    existing = self.read_folder(folder_id)['files']
    if any(entry['name'] == file_name for entry in existing):
        return 'file already exists'
    record = File(folder_id=folder.id, name=file_name, user_id=self.uid,
                  location=uuid, type='sheepdog', deleted=False)
    if self.storage.create_object(file=file_name, uuid=uuid):
        self.session.add(record)
        self.session.commit()
    else:
        self.session.rollback()
def _build_payload(self):
    """Return a dictionary that contains all of the generic payload data."""
    payload = {
        'collection_timestamp': time.time(),
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'resources': {},
        'internalHostname': gethostname(self.agentConfig),
        'uuid': get_uuid(),
    }
    # Include system stats on first postback
    if self._is_first_run():
        payload['systemStats'] = self.agentConfig.get('system_stats', {})
        # Also post an event in the newsfeed
        payload['events']['System'] = [{'api_key': self.agentConfig['api_key'],
                                        'host': payload['internalHostname'],
                                        'timestamp': int(time.mktime(datetime.datetime.now().timetuple())),
                                        'event_type': 'Agent Startup',
                                        'msg_text': 'Version %s' % get_version()
                                        }]
    # Periodically send the host metadata.
    if self._is_first_run() or self._should_send_metadata():
        payload['meta'] = self._get_metadata()
        self.metadata_cache = payload['meta']
    # Add static tags from the configuration file
    if self.agentConfig['tags'] is not None:
        payload['tags'] = self.agentConfig['tags']
    return payload
def submit_events(self, events):
    """POST *events* to /intake in chunks over a raw HTTP connection.

    Each chunk gets its own connection; the request duration is logged
    at debug level and the connection is always closed.
    """
    headers = {'Content-Type':'application/json'}
    method = 'POST'
    # NOTE(review): events_len is computed but never used.
    events_len = len(events)
    event_chunk_size = self.event_chunk_size
    for chunk in chunks(events, event_chunk_size):
        payload = {
            'apiKey': self.api_key,
            'events': {
                'api': chunk
            },
            'uuid': get_uuid(),
            'internalHostname': get_hostname()
        }
        params = {}
        if self.api_key:
            params['api_key'] = self.api_key
        url = '/intake?%s' % urlencode(params)
        status = None
        conn = self.http_conn_cls(self.api_host)
        try:
            start_time = time()
            conn.request(method, url, json.dumps(payload), headers)
            response = conn.getresponse()
            status = response.status
            response.close()
            # Wall-clock request duration in milliseconds.
            duration = round((time() - start_time) * 1000.0, 4)
            log.debug("%s %s %s%s (%sms)" % (
                status, method, self.api_host, url, duration))
        finally:
            conn.close()
def __init__(self, system, linux_conf_path, window_conf_path,
             central_configuration_url, central_configuration_api_key):
    """Record platform paths and precompute the central-configuration API URLs."""
    self.os = system.lower()
    self.linux_conf_path = linux_conf_path
    self.window_conf_path = window_conf_path
    self.central_configuration_url = central_configuration_url
    self.central_configuration_api_key = central_configuration_api_key
    self.agent_id = get_uuid()
    self.done = 0
    # Per-platform conf.d locations.
    self.path = {
        "linux": os.path.join(linux_conf_path, "conf.d/"),
        "windows": os.path.join(window_conf_path, "conf.d/"),
    }
    # Both endpoints share the same identity query string.
    identity = "id={}&source=agent&api_key={}".format(
        self.agent_id, central_configuration_api_key)
    self.apis = {
        "client_info":
        "{}list?{}".format(central_configuration_url, identity),
        "client_download_file":
        "{}file?{}&name=".format(central_configuration_url, identity),
    }
def submit_events(self, events):
    """POST *events* to /intake in chunks over a raw HTTP connection.

    A fresh connection is opened per chunk and always closed; request
    timing is logged at debug level.
    """
    headers = {'Content-Type': 'application/json'}
    method = 'POST'
    # NOTE(review): events_len is computed but never used.
    events_len = len(events)
    event_chunk_size = self.event_chunk_size
    for chunk in chunks(events, event_chunk_size):
        payload = {
            'apiKey': self.api_key,
            'events': {
                'api': chunk
            },
            'uuid': get_uuid(),
            'internalHostname': get_hostname()
        }
        params = {}
        if self.api_key:
            params['api_key'] = self.api_key
        url = '/intake?%s' % urlencode(params)
        status = None
        conn = self.http_conn_cls(self.api_host)
        try:
            start_time = time()
            conn.request(method, url, json.dumps(payload), headers)
            response = conn.getresponse()
            status = response.status
            response.close()
            # Wall-clock request duration in milliseconds.
            duration = round((time() - start_time) * 1000.0, 4)
            log.debug("%s %s %s%s (%sms)" %
                      (status, method, self.api_host, url, duration))
        finally:
            conn.close()
def process(org):
    """Ingest coldcall emails for *org* into sourcedeal records.

    Drains the org's IMAP mailbox one message at a time; for each message:
    normalizes the subject (stripping reply prefixes), dedupes on
    (orgId, titleMd5, origin), inserts a sourcedeal plus its forward
    record, and uploads whitelisted attachments to OSS.
    """
    # Orgs without an inbound coldcall mailbox are skipped.
    if org["coldcall_imap_server"] is None:
        return
    logger.info("orgId: %s, orgName: %s", org["id"], org["name"])
    # Strips "Re:/Fwd:"-style reply/forward prefixes in many locales
    # (incl. Greek, Hebrew, Chinese) from subject lines.
    re_name = re.compile(
        '([\[\(] *)?(RE?S?|FYI|RIF|I|FS|VB|RV|ENC|ODP|PD|YNT|ILT|SV|VS|VL|AW|WG|ΑΠ|ΣΧΕΤ|ΠΡΘ|תגובה|הועבר|主题|转发|FWD?) *([-:;)\]][ :;\])-]*|$)|\]+ *$',
        re.IGNORECASE)
    while True:
        # one=True: fetch (and consume) a single message per round trip.
        msgs = email_reader.receive(org["coldcall_imap_server"],
                                    org["coldcall_imap_port"],
                                    org["coldcall_username"],
                                    org["coldcall_password"],
                                    one=True)
        if len(msgs) == 0:
            break
        for msg in msgs:
            # Prefer a plain-text rendering of the HTML body when present.
            if msg["html"] is not None:
                parser = html2text.HTML2Text()
                parser.ignore_emphasis = True
                parser.single_line_break = True
                msg["html_text"] = parser.handle(msg["html"])
            else:
                msg["html_text"] = None
            logger.info(msg["subject"])
            logger.info(msg["from"])
            logger.info(msg["to"])
            logger.info(msg["cc"])
            # logger.info(msg["body"])
            # logger.info(msg["html_text"])
            logger.info("attachments=%d" % len(msg["attachments"]))
            for attach in msg["attachments"]:
                logger.info(attach.name)
            title = re_name.sub('', msg["subject"]).strip()
            title_md5 = util.md5str(title)
            # Dedupe: same org + normalized title + sender means we
            # already ingested this thread.
            conn = db.connect_torndb()
            cc = conn.get(
                "select * from sourcedeal where orgId=%s and titleMd5=%s and origin=%s limit 1",
                org["id"], title_md5, msg["from"])
            conn.close()
            if cc is not None:
                logger.info("%s Exists!" % title)
                continue
            # Body fallback chain: html_text -> body -> empty string,
            # truncated to 20000 chars.
            content = msg["html_text"]
            if content is None:
                content = msg["body"]
            if content is None:
                content = ""
            content = content.strip()
            if len(content) > 20000:
                content = content[0:20000]
            # Sender maps to the sponsor, first matching CC to the assignee.
            sponsor_id = find_user(org["id"], msg["from"])
            logger.info("sponsor_id=%s" % sponsor_id)
            assignee_id = find_user(org["id"], msg["cc"])
            logger.info("assignee_id=%s" % assignee_id)
            conn = db.connect_torndb()
            cc_id = conn.insert(
                "insert sourcedeal(title,titleMd5,content,orgId,createTime,origin,assignee,sponsor) values(%s,%s,%s,%s,%s,%s,%s,%s)",
                title, title_md5, content, org["id"], msg["date"], msg["from"],
                assignee_id, sponsor_id)
            if assignee_id is None:
                # No CC match: assign a random investment manager.
                ids = get_investment_manager_ids(org["id"])
                assignee_id = choice(ids)
                conn.update("update sourcedeal set assignee=%s where id=%s",
                            assignee_id, cc_id)
                conn.insert(
                    "insert sourcedeal_forward(sourcedealId,toUserId,createTime) "
                    "values(%s,%s,%s)", cc_id, assignee_id, msg["date"])
            else:
                conn.insert(
                    "insert sourcedeal_forward(sourcedealId,fromUserId,toUserId,createTime) "
                    "values(%s,%s,%s,%s)", cc_id, sponsor_id, assignee_id,
                    msg["date"])
            for attach in msg["attachments"]:
                if attach.name is not None and attach.name.strip() != "":
                    name = attach.name.strip()
                    # Only document/archive attachments are stored.
                    if not name.lower().endswith("pdf") and \
                            not name.lower().endswith("rar") and \
                            not name.lower().endswith("zip") and \
                            not name.lower().endswith("7z") and \
                            not name.lower().endswith("ppt") and \
                            not name.lower().endswith("pptx") and \
                            not name.lower().endswith("doc") and \
                            not name.lower().endswith("docx") and \
                            not name.lower().endswith("xls") and \
                            not name.lower().endswith("xlsx"):
                        continue
                    (content_type, encoding) = mimetypes.guess_type(name)
                    if content_type is None:
                        content_type = "application/octet-stream"
                    data = attach.getvalue()
                    # Formerly stored in GridFS; now a uuid key in OSS.
                    # mongo = db.connect_mongo()
                    # imgfs = gridfs.GridFS(mongo.gridfs)
                    # logo_id = imgfs.put(data, content_type=content_type, filename=name)
                    # mongo.close()
                    logo_id = util.get_uuid()
                    logger.info("gridfs logo_id=%s" % logo_id)
                    oss2 = oss2_helper.Oss2Helper()
                    headers = {"Content-Type": content_type}
                    oss2.put(str(logo_id), data, headers=headers)
                    conn.insert(
                        "insert sourcedeal_file(sourcedealId,filename,fileId,createTime) "
                        "values(%s,%s,%s,%s)", cc_id, name, logo_id,
                        msg["date"])
            conn.close()
class CollectorStatus(AgentStatus):
    """Aggregated status of the collector: per-check and per-emitter results
    plus host metadata, renderable as text or exportable as a dict."""

    NAME = 'Collector'

    def __init__(self, check_statuses=None, emitter_statuses=None, metadata=None):
        AgentStatus.__init__(self)
        # Lists default to empty so callers may omit any of them.
        self.check_statuses = check_statuses or []
        self.emitter_statuses = emitter_statuses or []
        self.host_metadata = metadata or []

    @property
    def status(self):
        """Overall status: error if any check errored, else OK."""
        for check_status in self.check_statuses:
            if check_status.status == STATUS_ERROR:
                return STATUS_ERROR
        return STATUS_OK

    def has_error(self):
        return self.status != STATUS_OK

    @staticmethod
    def check_status_lines(cs):
        """Render one check status *cs* as a list of human-readable lines."""
        check_lines = [' ' + cs.name, ' ' + '-' * len(cs.name)]
        if cs.init_failed_error:
            # The check class itself failed to initialize.
            check_lines.append(
                " - initialize check class [%s]: %s" %
                (style(STATUS_ERROR, 'red'), repr(cs.init_failed_error)))
            if cs.init_failed_traceback:
                check_lines.extend(
                    ' ' + line
                    for line in cs.init_failed_traceback.split('\n'))
        else:
            for s in cs.instance_statuses:
                # Color: red beats yellow beats green.
                c = 'green'
                if s.has_warnings():
                    c = 'yellow'
                if s.has_error():
                    c = 'red'
                line = " - instance #%s [%s]" % (s.instance_id,
                                                 style(s.status, c))
                if s.has_error():
                    line += u": %s" % s.error
                if s.metric_count is not None:
                    line += " collected %s metrics" % s.metric_count
                if s.instance_check_stats is not None:
                    line += " Last run duration: %s" % s.instance_check_stats.get(
                        'run_time')
                check_lines.append(line)
                if s.has_warnings():
                    for warning in s.warnings:
                        warn = warning.split('\n')
                        if not len(warn):
                            continue
                        check_lines.append(
                            u" %s: %s" %
                            (style("Warning", 'yellow'), warn[0]))
                        check_lines.extend(u" %s" % l for l in warn[1:])
                if s.traceback is not None:
                    check_lines.extend(' ' + line
                                       for line in s.traceback.split('\n'))
            check_lines += [
                " - Collected %s metric%s, %s event%s & %s service check%s" %
                (cs.metric_count, plural(
                    cs.metric_count), cs.event_count, plural(cs.event_count),
                 cs.service_check_count, plural(cs.service_check_count)),
            ]
            if cs.check_stats is not None:
                check_lines += [
                    " - Stats: %s" % pretty_statistics(cs.check_stats)
                ]
            if cs.library_versions is not None:
                check_lines += [" - Dependencies:"]
                for library, version in cs.library_versions.iteritems():
                    check_lines += [" - %s: %s" % (library, version)]
        check_lines += [""]
        return check_lines

    @staticmethod
    def render_check_status(cs):
        indent = " "
        lines = [indent + l
                 for l in CollectorStatus.check_status_lines(cs)] + ["", ""]
        return "\n".join(lines)

    def body_lines(self):
        """Build the text-status body (clock and uuid sections)."""
        metadata_whitelist = ['hostname', 'fqdn', 'ipv4', 'instance-id']
        lines = ['Clocks', '======', '']
        try:
            ntp_offset, ntp_styles = get_ntp_info()
            lines.append(' ' + style('NTP offset', *ntp_styles) + ': ' +
                         style('%s s' % round(ntp_offset, 4), *ntp_styles))
        except Exception, e:
            lines.append(' NTP offset: Unknown (%s)' % str(e))
        lines.append(' System UTC time: ' +
                     datetime.datetime.utcnow().__str__())
        lines.append('')
        # NOTE(review): this reassignment discards the Clocks section built
        # above — appending (lines += [...]) looks intended; confirm before
        # changing behavior.
        lines = ['UUID', '======', '']
        try:
            uuid = get_uuid()
            lines.append(' System uuid: ' + str(uuid))
        except Exception, e:
            lines.append(' System uuid: Unknown (%s)' % str(e))

    def to_dict(self):
        """Export the collector status as a plain dict for serialization."""
        status_info = AgentStatus.to_dict(self)
        # Only whitelisted hostname-ish metadata keys are exported.
        status_info['hostnames'] = {}
        metadata_whitelist = ['hostname', 'fqdn', 'ipv4', 'instance-id']
        if self.host_metadata:
            for key, host in self.host_metadata.iteritems():
                for whitelist_item in metadata_whitelist:
                    if whitelist_item in key:
                        status_info['hostnames'][key] = host
                        break
        status_info['checks'] = {}
        check_statuses = self.check_statuses + get_jmx_status()
        for cs in check_statuses:
            status_info['checks'][cs.name] = {'instances': {}}
            if cs.init_failed_error:
                status_info['checks'][cs.name]['init_failed'] = True
                status_info['checks'][cs.name]['traceback'] = \
                    cs.init_failed_traceback or cs.init_failed_error
            else:
                status_info['checks'][cs.name] = {'instances': {}}
                status_info['checks'][cs.name]['init_failed'] = False
                for s in cs.instance_statuses:
                    status_info['checks'][cs.name]['instances'][
                        s.instance_id] = {
                            'status': s.status,
                            'has_error': s.has_error(),
                            'has_warnings': s.has_warnings(),
                        }
                    if s.has_error():
                        status_info['checks'][cs.name]['instances'][
                            s.instance_id]['error'] = s.error
                    if s.has_warnings():
                        status_info['checks'][cs.name]['instances'][
                            s.instance_id]['warnings'] = s.warnings
                status_info['checks'][
                    cs.name]['metric_count'] = cs.metric_count
                status_info['checks'][cs.name]['event_count'] = cs.event_count
                status_info['checks'][
                    cs.name]['service_check_count'] = cs.service_check_count
        status_info['emitter'] = []
        for es in self.emitter_statuses:
            check_status = {
                'name': es.name,
                'status': es.status,
                'has_error': es.has_error(),
            }
            if es.has_error():
                check_status['error'] = es.error
            status_info['emitter'].append(check_status)
        osname = config.get_os()
        try:
            status_info['confd_path'] = config.get_confd_path(osname)
        except config.PathNotFound:
            status_info['confd_path'] = 'Not found'
        try:
            status_info['checksd_path'] = config.get_checksd_path(osname)
        except config.PathNotFound:
            status_info['checksd_path'] = 'Not found'
        try:
            ntp_offset, ntp_style = get_ntp_info()
            # A non-empty style list indicates a concerning offset.
            warn_ntp = len(ntp_style) > 0
            status_info["ntp_offset"] = round(ntp_offset, 4)
        except Exception as e:
            ntp_offset = "Unknown (%s)" % str(e)
            warn_ntp = True
            status_info["ntp_offset"] = ntp_offset
        status_info["ntp_warning"] = warn_ntp
        status_info["utc_time"] = datetime.datetime.utcnow().__str__()
        try:
            uuid = str(get_uuid())
        except Exception as e:
            uuid = "Unknown (%s)" % str(e)
        status_info["uuid"] = uuid
        return status_info
def process(dir_path, filename):
    """Ingest one local PDF into the report store.

    Skips non-files and non-PDF names, decrypts encrypted PDFs in place,
    dedupes on (md5, title), uploads the bytes to OSS and records the
    metadata via save().

    :param dir_path: directory containing the file
    :param filename: bare file name (expected to end with ".pdf")
    :return: True when ingested or already present, False when skipped.
    """
    file_path = os.path.join(dir_path, filename)
    if not os.path.isfile(file_path):
        return False
    if not filename.lower().endswith(".pdf"):
        return False

    # FIX: use open() instead of the Python-2-only file() builtin, and
    # guarantee the handle is released even if PyPDF raises mid-read.
    fp = open(file_path, "rb")
    try:
        pdfReader = PdfFileReader(fp)
        if pdfReader.isEncrypted:
            # Decrypt on disk, then reopen and re-parse the decrypted copy.
            fp.close()
            logger.info("File encrypted! filename: %s", filename)
            decrypt_pdf(file_path)
            fp = open(file_path, "rb")
            pdfReader = PdfFileReader(fp)
        pages = pdfReader.getNumPages()
    finally:
        fp.close()

    # Use filesystem ctime; the -8h shift converts to the store's timezone
    # convention (same shift is applied elsewhere in this codebase).
    ts = os.path.getctime(file_path)
    dt = datetime.fromtimestamp(ts) - timedelta(hours=8)
    size = os.path.getsize(file_path)

    title = filename[0:-4].strip()  # drop the ".pdf" suffix (4 chars)
    source = None
    if u":" in title:
        # "source:title" naming convention — split on the first colon only.
        strs = title.split(u":", 1)
        source = strs[0]
        title = strs[1]

    md5 = util.get_file_md5(file_path)
    if check_file_exists(md5, title):
        # Already ingested: treat as success.
        return True

    fileid = util.get_uuid()
    logger.info("%s, %s, %s, %s, %s, %s", title, size, dt, pages, md5, fileid)

    oss = oss2_helper.Oss2Helper("xiniudata-report")
    fp = open(file_path, "rb")
    try:
        oss.put(fileid, fp, headers={
            "Content-Type": "application/pdf",
            "x-oss-meta-filename": filename.strip()
        })
    finally:
        fp.close()

    save(source, filename, title, size, dt, pages, md5, fileid)
    return True
def process(rep):
    """Download and ingest one remote report PDF described by `rep`.

    `rep` is expected to carry at least: "durl" (download url), "title",
    "filename", "source".  Retries the download up to 20 times; returns
    True on success, False on failure or when the file already exists.
    """
    res = 0
    while True:
        # Clear any previous download artifact, then count this attempt.
        delete()
        res += 1
        if res > 20:
            # Give up after 20 attempts.
            return False
        run(rep["durl"])
        logger.info("saving done")
        # run() is assumed to save the PDF under this fixed local name
        # — TODO(review): confirm against run()'s implementation.
        file_path = "download.pdf"
        if not os.path.isfile(file_path):
            return False
        # logger.info(file_path)
        # try:
        #     fp = open(file_path, "rb")
        #     pdfReader = PdfFileReader(fp)
        #     logger.info("read done")
        #     if pdfReader.isEncrypted:
        #         return False
        # except:
        #     continue
        # pages = pdfReader.getNumPages()
        pages, pdfcreationDate = getPage(file_path)
        if pdfcreationDate is None:
            # Unreadable / invalid PDF.
            return False
        # fp.close()
        size = os.path.getsize(file_path)
        md5 = util.get_file_md5(file_path)
        if check_file_exists(md5, rep["title"]):
            # Duplicate report: do not re-ingest.
            return False
        fileid = util.get_uuid()
        logger.info("%s, %s, %s, %s, %s, %s", rep["title"], size,
                    pdfcreationDate, pages, md5, fileid)
        # Upload the PDF bytes to OSS under the generated uuid.
        oss = oss2_helper.Oss2Helper("xiniudata-report")
        fp = file(file_path, "rb")
        oss.put(fileid, fp, headers={
            "Content-Type": "application/pdf",
            "x-oss-meta-filename": rep["filename"]
        })
        fp.close()
        # Record report metadata in Mongo; the -8h shift matches the
        # store's timezone convention used elsewhere in this codebase.
        mongo = db.connect_mongo()
        mongo.article.report.insert_one({
            "source": rep["source"],
            "description": None,
            "title": rep["title"],
            "filename": rep["filename"],
            "size": size,
            "pdfCreationDate": pdfcreationDate,
            "pages": pages,
            "md5": md5,
            "fileid": fileid,
            "createTime": datetime.datetime.now() - datetime.timedelta(hours=8),
            "modifyTime": datetime.datetime.now() - datetime.timedelta(hours=8),
            "type": 78001
        })
        mongo.close()
        return True
def _build_payload(self, start_event=True):
    """
    Return a dictionary that contains all of the generic payload data.

    :param start_event: when True and this is the first run, include
        system stats and emit an "Agent Startup" event.
    """
    now = time.time()
    payload = {
        'collection_timestamp': now,
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'service_checks': [],
        'resources': {},
        'internalHostname': self.hostname,
        'uuid': get_uuid(),
        'host-tags': {},
        'external_host_tags': {}
    }

    # Include system stats on first postback
    if start_event and self._is_first_run():
        payload['systemStats'] = self.agentConfig.get('system_stats', {})
        # Also post an event in the newsfeed
        payload['events']['System'] = [{
            'api_key': self.agentConfig['api_key'],
            'host': payload['internalHostname'],
            'timestamp': now,
            'event_type': 'Agent Startup',
            'msg_text': 'Version %s' % get_version()
        }]

    # Periodically send the host metadata.
    if self._should_send_additional_data('metadata'):
        # gather metadata with gohai (external binary; missing binary is
        # tolerated, any other failure is only logged).
        try:
            if get_os() != 'windows':
                command = "gohai"
            else:
                command = "gohai\gohai.exe"
            gohai_metadata = subprocess.Popen(
                [command], stdout=subprocess.PIPE).communicate()[0]
            payload['gohai'] = gohai_metadata
        except OSError as e:
            if e.errno == 2:
                # file not found, expected when install from source
                log.info("gohai file not found")
            else:
                raise e
        except Exception as e:
            log.warning("gohai command failed with error %s" % str(e))

        payload['systemStats'] = get_system_stats()
        payload['meta'] = self._get_metadata()
        # Cache so later runs can log/compare without recomputing.
        self.metadata_cache = payload['meta']

        # Add static tags from the configuration file
        host_tags = []
        if self.agentConfig['tags'] is not None:
            host_tags.extend([
                unicode(tag.strip())
                for tag in self.agentConfig['tags'].split(",")
            ])

        if self.agentConfig['collect_ec2_tags']:
            host_tags.extend(EC2.get_tags(self.agentConfig))

        if host_tags:
            payload['host-tags']['system'] = host_tags

        GCE_tags = GCE.get_tags(self.agentConfig)
        if GCE_tags is not None:
            payload['host-tags'][GCE.SOURCE_TYPE_NAME] = GCE_tags

        # Log the metadata on the first run
        if self._is_first_run():
            log.info("Hostnames: %s, tags: %s" %
                     (repr(self.metadata_cache), payload['host-tags']))

    # Periodically send extra hosts metadata (vsphere)
    # Metadata of hosts that are not the host where the agent runs,
    # not all the checks use that
    external_host_tags = []
    if self._should_send_additional_data('external_host_tags'):
        for check in self.initialized_checks_d:
            try:
                getter = getattr(check, 'get_external_host_tags')
                check_tags = getter()
                external_host_tags.extend(check_tags)
            except AttributeError:
                # Check does not implement get_external_host_tags: skip it.
                pass

    if external_host_tags:
        payload['external_host_tags'] = external_host_tags

    return payload
class UserBasicMixin(models.Model):
    """
    An abstract base class implementing a basic User model.

    Email and password are required. Other fields are optional.
    """
    # BUG FIX: `default` must be the *callable* get_uuid, not get_uuid().
    # Calling it here evaluates once at import time, so every new row
    # would share the same default primary key and collide on insert.
    # Django calls the callable per instance instead.
    id = models.CharField(
        primary_key=True,
        editable=False,
        db_index=True,
        max_length=32,
        default=get_uuid,
    )
    date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
    email = LowerCaseEmailField(
        _('email address'),
        blank=False,
        unique=True,
    )
    first_name = models.CharField(
        _('first name'),
        max_length=50,
        blank=True,
    )
    last_name = models.CharField(
        _('last name'),
        max_length=50,
        blank=True,
    )
    is_active = models.BooleanField(
        _('active'),
        default=True,
        help_text=_('If true, then this user is active'),
    )
    is_public = models.BooleanField(
        _('public'),
        default=True,
        help_text=_('If true, then this user is public'),
    )

    class Meta:
        abstract = True

    def get_absolute_url(self):
        """Return the canonical URL for this user."""
        # BUG FIX: the model defines `id`, not `uuid`; self.uuid raised
        # AttributeError at runtime.
        return "/users/%s/" % urlquote(self.id)

    def get_full_name(self):
        """
        Returns the first_name plus the last_name, with a space in between.
        """
        full_name = '%s %s' % (self.first_name, self.last_name)
        return full_name.strip()

    def get_short_name(self):
        "Returns the short name for the user."
        return self.first_name

    def email_user(self, subject, message, from_email=None):
        """
        Sends an email to this User.
        """
        send_mail(subject, message, from_email, [self.email])
def _build_payload(self, start_event=True):
    """
    Return a dictionary that contains all of the generic payload data.

    :param start_event: when True and this is the first run, include
        system stats and post an "Agent Startup" event.
    """
    now = time.time()
    payload = {
        'collection_timestamp': now,
        'os': self.os,
        'python': sys.version,
        'agentVersion': self.agentConfig['version'],
        'apiKey': self.agentConfig['api_key'],
        'events': {},
        'metrics': [],
        'service_checks': [],
        'resources': {},
        'internalHostname': self.hostname,
        'uuid': get_uuid(),
        'host-tags': {},
        'external_host_tags': {}
    }

    # Include system stats on first postback
    if start_event and self._is_first_run():
        payload['systemStats'] = self.agentConfig.get('system_stats', {})
        # Also post an event in the newsfeed
        payload['events']['System'] = [{
            'api_key': self.agentConfig['api_key'],
            'host': payload['internalHostname'],
            'timestamp': now,
            'event_type': 'Agent Startup',
            'msg_text': 'Version %s' % get_version()
        }]

    # Periodically send the host metadata.
    if self._should_send_additional_data('metadata'):
        # gather metadata with gohai (external binary; a missing binary is
        # tolerated, other failures are logged and ignored).
        try:
            if get_os() != 'windows':
                command = "gohai"
            else:
                command = "gohai\gohai.exe"
            gohai_metadata = subprocess.Popen(
                [command], stdout=subprocess.PIPE).communicate()[0]
            payload['gohai'] = gohai_metadata
        except OSError as e:
            if e.errno == 2:
                # file not found, expected when install from source
                log.info("gohai file not found")
            else:
                raise e
        except Exception as e:
            log.warning("gohai command failed with error %s" % str(e))

        payload['systemStats'] = get_system_stats()
        payload['meta'] = self._get_metadata()
        # Cache so later runs can log/compare without recomputing.
        self.metadata_cache = payload['meta']

        # Add static tags from the configuration file
        host_tags = []
        if self.agentConfig['tags'] is not None:
            host_tags.extend([
                unicode(tag.strip())
                for tag in self.agentConfig['tags'].split(",")
            ])

        if self.agentConfig['collect_ec2_tags']:
            host_tags.extend(EC2.get_tags(self.agentConfig))

        if host_tags:
            payload['host-tags']['system'] = host_tags

        GCE_tags = GCE.get_tags(self.agentConfig)
        if GCE_tags is not None:
            payload['host-tags'][GCE.SOURCE_TYPE_NAME] = GCE_tags

        # Log the metadata on the first run
        if self._is_first_run():
            log.info("Hostnames: %s, tags: %s" %
                     (repr(self.metadata_cache), payload['host-tags']))

    # Periodically send extra hosts metadata (vsphere)
    # Metadata of hosts that are not the host where the agent runs,
    # not all the checks use that
    external_host_tags = []
    if self._should_send_additional_data('external_host_tags'):
        for check in self.initialized_checks_d:
            try:
                getter = getattr(check, 'get_external_host_tags')
                check_tags = getter()
                external_host_tags.extend(check_tags)
            except AttributeError:
                # Check does not implement get_external_host_tags: skip it.
                pass

    if external_host_tags:
        payload['external_host_tags'] = external_host_tags

    return payload
def test_get_uuid():
    """get_uuid() must be stable: repeated calls return the same value."""
    results = [get_uuid() for _ in range(2)]
    assert results[0] == results[1]
def upload_crop(image_path, ext):
    """Create source/middle/thumbnail renditions of an uploaded image.

    Moves the original into the upload store, then writes a proportional
    middle-size copy and a center-cropped thumbnail next to it.

    :param image_path: path of the freshly uploaded image file
    :param ext: file extension including the leading dot (e.g. ".jpg")
    :return: dict with "status" (bool); on success also "source_path",
        "middle_path", "thumb_path", "width", "height"; on failure a
        "message" describing the error.
    """
    response = {}
    pin_width = 0
    pin_height = 0
    if not os.path.exists(image_path):
        # BUG FIX: this failure path previously reported status=True even
        # though no paths/sizes are produced; every other failure path
        # reports False, and callers would otherwise read missing keys.
        response["status"] = False
        response["message"] = "Not found: %s" % image_path
        return response

    # PIL expects "JPEG", not "JPG", as the format name.
    image_ext = ext[1:].upper()
    if image_ext == "JPG":
        image_ext = "JPEG"

    store_dir = _get_image_dir(UPLOAD_DIR)
    base_name = get_uuid()
    source_name = "%s_source%s" % (base_name, ext)
    source_path = os.path.join(store_dir, source_name)
    thumb_name = "%s_thumb%s" % (base_name, ext)
    thumb_path = os.path.join(store_dir, thumb_name)
    middle_name = "%s_mid%s" % (base_name, ext)
    middle_path = os.path.join(store_dir, middle_name)

    # source: move the upload into the store under a uuid-based name.
    try:
        os.rename(image_path, source_path)
    except Exception:
        Log.error("Save source error: %s" % image_path)
        response["status"] = False
        response["message"] = "Save source error: %s" % image_path
        return response

    img = Image.open(source_path)

    # middle: proportional resize to MIDDLE_WIDTH; reject images smaller
    # than the target in either dimension.
    dest_width = MIDDLE_WIDTH
    width, height = img.size
    if width < dest_width or height < dest_width:
        response["status"] = False
        response["message"] = "Image size too small"
        return response
    dest_height = int(float(dest_width) * float(height) / float(width))
    img_mid = img.resize((dest_width, dest_height), Image.ANTIALIAS)
    # NOTE(review): JPEG quality is capped at 100 by PIL; 150 is clamped.
    img_mid.save(middle_path, image_ext, quality=150)
    pin_width, pin_height = (dest_width, dest_height)

    # thumb: center-crop to THUMB_SIZE's aspect ratio, then resize.
    dest_width, dest_height = THUMB_SIZE
    left, upper, right, lowwer = 0, 0, dest_width, dest_height
    crop_width, crop_height = dest_width, dest_height
    if float(dest_width) / float(dest_height) < float(width) / float(height):
        # Source is wider than the target ratio: crop left/right.
        crop_height = height
        crop_width = int(height * (float(dest_width) / float(dest_height)))
        left = int((width - crop_width) / 2)
        right = left + crop_width
        lowwer = height
    else:
        # Source is taller than the target ratio: crop top/bottom.
        crop_width = width
        crop_height = int(width * (float(dest_height) / float(dest_width)))
        upper = int((height - crop_height) / 2)
        lowwer = upper + crop_height
        right = width
    box = (left, upper, right, lowwer)
    img_thumb = img.crop(box)
    img_thumb = img_thumb.resize((dest_width, dest_height), Image.ANTIALIAS)
    img_thumb.save(thumb_path, image_ext, quality=150)

    response["status"] = True
    response["source_path"] = source_path
    response["thumb_path"] = thumb_path
    response["middle_path"] = middle_path
    response["height"] = pin_height
    response["width"] = pin_width
    return response
def handler(event, context):
    """Lambda entry point: dispatch SNS/CFN notifications and APIG/CLI calls.

    POST via API Gateway records a provisioning job in DynamoDB and applies
    the application CloudFormation template; PUT and CLI invocations are
    currently log-only.  Always returns an empty dict.
    """
    log.debug("Received event {}".format(json.dumps(event)))
    if "Records" in event:
        # from SNS/CFN.
        # BUG FIX: Sns.Message is a JSON-encoded *string*; the old code
        # indexed it with ["message"] before json.loads, which raises
        # TypeError (string indices must be integers). Parse first, then
        # read the inner "message" field from the resulting dict.
        snsData = json.loads(event["Records"][0]["Sns"]["Message"])
        log.debug("From SNS: %s" % snsData)
        message = snsData["message"]
    else:
        # from APIG or CLI call
        if "httpMethod" in event:
            log.debug("Context: %s" % event['httpMethod'])
            if event["httpMethod"] == "POST":
                # create
                #env_name = event['params']['path']['envname']
                env_name = "dev"
                config_path = os.path.join(here, ENV_CONFIG_FILENAME)
                with open(config_path) as json_file:
                    config_dict = json.load(json_file)
                # Raises KeyError early if the environment is not configured.
                env_dict = config_dict[env_name]
                app_name = config_dict['app_name']
                app_env = '%s-%s' % (app_name, env_name)

                # Record the job and its pending steps in DynamoDB.
                table = dynamodb.Table('BlargotronJobs')
                jobId = get_uuid()
                timestamp = int(time.time())
                log.debug("timestamp: %s" % timestamp)
                table.put_item(
                    Item={
                        'jobId': jobId,
                        'timestamp': timestamp,
                        'env_name': env_name,
                        'steps': [
                            {
                                'name': 'createEnvironment',
                                'template': ENV_CONFIG_FILENAME,
                                'status': 'WAITING'
                            },
                            {
                                'name': 'deployBeanstalkApp',
                                'template': 'ebapp.json',
                                'status': 'WAITING'
                            }
                        ]
                    }
                )
                # now, actually do work
                template_body = generate_application_template(config_dict)
                apply_template(app_name, template_body, wait=True)
                #
                # template_body = generate_env_template(app_env, env_dict)
                # apply_template(app_env, template_body, notify=True)
            elif event["httpMethod"] == "PUT":
                # update
                log.debug("APIG call: PUT")
        else:
            # CLI call
            log.debug("CLI call")
    return {}
def upload_file():
    """Flask view: verify an uploaded face image against a user's classifier.

    On POST, saves the upload into a per-user test folder, aligns it with
    the face net, classifies it, and on a confident match (>= 0.90 for the
    claimed username) promotes the image into the training folders.
    Renders index.html in every case.

    NOTE(review): the test upload folders are wiped on every request, so
    concurrent requests would interfere — single-user use assumed.
    """
    if request.method == 'POST':
        username = request.form["username"]
        if username is None or len(username) == 0:
            return render_template("index.html")
        file = request.files['upload_file']
        if file and util.allowed_file(file.filename):
            filename = secure_filename(file.filename)
            dirpath = os.path.join(app.config['UPLOAD_TEST_FOLDER'], username)
            align_dirpath = os.path.join(
                app.config['UPLOAD_TEST_ALIGN_FOLDER'], username)
            align_filepath = os.path.join(align_dirpath, filename)
            # Reset the test areas so only this upload is classified.
            shutil.rmtree(app.config['UPLOAD_TEST_FOLDER'])
            shutil.rmtree(app.config['UPLOAD_TEST_ALIGN_FOLDER'])
            os.mkdir(app.config['UPLOAD_TEST_FOLDER'])
            os.mkdir(app.config['UPLOAD_TEST_ALIGN_FOLDER'])
            os.mkdir(dirpath)
            filepath = os.path.join(dirpath, filename)
            file.save(filepath)
            # Align faces in scratch dirs named with fresh uuids, then move
            # the aligned output into the test_align area.
            des = 'tmp/%s' % util.get_uuid()
            src = 'tmp/%s' % util.get_uuid()
            shutil.copytree(dirpath, os.path.join(src, username))
            net.align_dataset(des, src, 0.25, True, 32, 160)
            shutil.copytree(os.path.join(des, username),
                            os.path.join('uploads/test_align', username))
            shutil.rmtree(src)
            shutil.rmtree(des)
            # Classify the aligned upload against this user's classifier.
            res = net.classify(False, 'CLASSIFY',
                               app.config['UPLOAD_TEST_ALIGN_FOLDER'], 20, 10,
                               '20170512-110547/20170512-110547.pb',
                               'classifiers/%s_classifier.pkl' % username,
                               1000, 160)
            if res[0][1] == username and res[0][2] >= 0.90:
                # Confident match: keep the image for future training.
                # generate a random filename
                pic_name = util.get_uuid() + '.png'
                pic_align_name = util.get_uuid() + '.png'
                des_path = os.path.join(
                    os.path.join(app.config['UPLOAD_FOLDER'], username),
                    pic_name)
                align_des_path = os.path.join(
                    os.path.join(app.config['UPLOAD_TRAIN_ALIGN_FOLDER'],
                                 username), pic_align_name)
                shutil.move(filepath, des_path)
                shutil.move(
                    align_filepath.split('.')[0] + '.png', align_des_path)
                return render_template(
                    'index.html',
                    # img_path='uploads/train/%s/%s' % (username, pic_name),
                    img_align_path=align_des_path,
                    username=username,
                    prob=res[0][2])
            else:
                return render_template(
                    'index.html',
                    img_path=filepath,
                    # img_align_path='uploads/test_align/%s/%s' %(username, filename),
                    err_msg='not the {} picture'.format(username))
    else:
        # GET (or other methods): just render the form.
        return render_template('index.html')
def upload_crop(image_path, ext):
    """Create source/middle/thumbnail renditions of an uploaded photo.

    Moves the original into the photo store, then writes a proportional
    middle-size copy and a center-cropped thumbnail next to it.

    :param image_path: path of the freshly uploaded image file
    :param ext: file extension including the leading dot (e.g. ".jpg")
    :return: dict with "status" (bool); on success also "source_path",
        "middle_path", "thumb_path", "width", "height"; on failure a
        "message" describing the error.
    """
    response = {}
    pin_width = 0
    pin_height = 0
    if not os.path.exists(image_path):
        # BUG FIX: this failure path previously reported status=True even
        # though no paths/sizes are produced; every other failure path
        # reports False, and callers would otherwise read missing keys.
        response["status"] = False
        response["message"] = "Not found: %s" % image_path
        return response

    # PIL expects "JPEG", not "JPG", as the format name.
    image_ext = ext[1:].upper()
    if image_ext == "JPG":
        image_ext = "JPEG"

    store_dir = _get_image_dir(PHOTO_PATH)
    base_name = get_uuid()
    source_name = "%s_source%s" % (base_name, ext)
    source_path = os.path.join(store_dir, source_name)
    thumb_name = "%s_thumb%s" % (base_name, ext)
    thumb_path = os.path.join(store_dir, thumb_name)
    middle_name = "%s_mid%s" % (base_name, ext)
    middle_path = os.path.join(store_dir, middle_name)

    # source: move the upload into the store under a uuid-based name.
    try:
        os.rename(image_path, source_path)
    except Exception:
        Log.error("Save source error: %s" % image_path)
        response["status"] = False
        response["message"] = "Save source error: %s" % image_path
        return response

    img = Image.open(source_path)

    # middle: proportional resize to MIDDLE_WIDTH; reject images smaller
    # than the target in either dimension.
    dest_width = MIDDLE_WIDTH
    width, height = img.size
    if width < dest_width or height < dest_width:
        response["status"] = False
        response["message"] = "Image size too small"
        return response
    dest_height = int(float(dest_width) * float(height) / float(width))
    img_mid = img.resize((dest_width, dest_height), Image.ANTIALIAS)
    # NOTE(review): JPEG quality is capped at 100 by PIL; 150 is clamped.
    img_mid.save(middle_path, image_ext, quality=150)
    pin_width, pin_height = (dest_width, dest_height)

    # thumb: center-crop to THUMB_SIZE's aspect ratio, then resize.
    dest_width, dest_height = THUMB_SIZE
    left, upper, right, lowwer = 0, 0, dest_width, dest_height
    crop_width, crop_height = dest_width, dest_height
    if float(dest_width)/float(dest_height) < float(width)/float(height):
        # Source is wider than the target ratio: crop left/right.
        crop_height = height
        crop_width = int(height * (float(dest_width) / float(dest_height)))
        left = int((width - crop_width) / 2)
        right = left + crop_width
        lowwer = height
    else:
        # Source is taller than the target ratio: crop top/bottom.
        crop_width = width
        crop_height = int(width * (float(dest_height) / float(dest_width)))
        upper = int((height - crop_height) / 2)
        lowwer = upper + crop_height
        right = width
    box = (left, upper, right, lowwer)
    img_thumb = img.crop(box)
    img_thumb = img_thumb.resize((dest_width, dest_height), Image.ANTIALIAS)
    img_thumb.save(thumb_path, image_ext, quality=150)

    response["status"] = True
    response["source_path"] = source_path
    response["thumb_path"] = thumb_path
    response["middle_path"] = middle_path
    response["height"] = pin_height
    response["width"] = pin_width
    return response