def FetchBase(base, patch):
  """Fetch the content of the file to which the file is relative.

  Args:
    base: the base property of the Issue to which the Patch belongs.
    patch: a models.Patch instance.

  Returns:
    A models.Content instance.

  Raises:
    FetchError: For any kind of problem fetching the content.

  NOTE(review): as visible here the function ends inside the fetch error
  handler, so a successful fetch would fall through and return None --
  this block appears truncated; verify against the full source.
  """
  filename, lines = patch.filename, patch.lines
  # The revision, if any, is parsed out of the patch's diff header lines.
  rev = patching.ParseRevision(lines)
  if rev is not None:
    if rev == 0:
      # rev=0 means it's a new file.
      return models.Content(text=db.Text(u''), parent=patch)
  # Validate the base URL by round-tripping it through the datastore
  # Link property type; a malformed URL becomes a FetchError.
  try:
    base = db.Link(base)
  except db.BadValueError:
    msg = 'Invalid base URL: %s' % base
    logging.warn(msg)
    raise FetchError(msg)
  url = _MakeUrl(base, filename, rev)
  logging.info('Fetching %s', url)
  # Wrap any fetch failure (DNS, timeout, quota, ...) in FetchError so
  # callers only need to catch one exception type.
  try:
    result = urlfetch.fetch(url)
  except Exception, err:
    msg = 'Error fetching %s: %s: %s' % (url, err.__class__.__name__, err)
    logging.warn('FetchBase: %s', msg)
    raise FetchError(msg)
def admin_main():
    """Admin view: create a new Content entry on POST, list all on GET.

    Returns:
        A redirect to /admin after a (attempted) save, or the rendered
        admin template with the content list and an empty form.
    """
    contentForm = models.content_form(request.form)

    if request.method == "POST" and contentForm.validate():
        app.logger.debug(request.form)

        newContent = models.Content()
        newContent.title = request.form.get('title')
        newContent.content = request.form.get('content')
        # link to current user
        newContent.user = current_user.get()

        try:
            newContent.save()
        except Exception:
            # BUG FIX: was a bare `except:` which also swallows
            # SystemExit/KeyboardInterrupt. Save stays best-effort: log
            # the failure but still redirect (original behavior).
            app.logger.error(sys.exc_info())

        return redirect('/admin')
    else:
        templateData = {
            'allContent': models.Content.objects(user=current_user.id),
            'current_user': current_user,
            'form': contentForm,
            'formType': 'New'
        }
        return render_template('admin.html', **templateData)
def create_content(change_id, url_path, content_type, data):
    """Persist a blob of uploaded content in the datastore.

    Args:
        change_id: The ID of the change, as an int.
        url_path: The URL path of the content being uploaded.
        content_type: The MIME content type for the data.
        data: The data of the content.
    """
    # Build the entity under its deterministic key and write it in one go.
    models.Content(
        key=models.Content.get_key(url_path, change_id),
        content_type=content_type,
        data=data,
    ).put()
    logging.info(
        'Created Content for url_path=%r change_id=%r, data length=%d',
        url_path, change_id, len(data))
def make_models(post):
    """Persist a scraped job post as Company/JobPost/PostHeader/Content rows.

    Args:
        post: a scraped post object with .company, .title, .position and a
            .content mapping of header -> string or list of strings.
    """
    # get_or_create returns (object, created); only the object is needed.
    company = m.Company.objects.get_or_create(name=post.company)[0]
    company.save()

    job_post = m.JobPost.objects.get_or_create(
        url=url(post.title), position=post.position, company=company)[0]
    job_post.save()

    for header in post.content:
        body = post.content[header]
        post_header = m.PostHeader.objects.get_or_create(
            orig_header=header, job_post=job_post)[0]
        post_header.save()

        # BUG FIX: was `type(...) == list`; isinstance also accepts list
        # subclasses and is the idiomatic type check. Also removed a
        # leftover debug `print(post)`.
        if isinstance(body, list):
            for chunk in body:
                content = m.Content.objects.get_or_create(
                    text=chunk.encode('utf-8'), post_header=post_header)[0]
                content.save()
        else:
            # NOTE(review): unlike the list branch, the scalar branch does
            # not encode the text and bypasses get_or_create (so duplicates
            # are possible) -- confirm this asymmetry is intentional.
            content = m.Content(text=body, post_header=post_header)
            content.save()
msg = 'Invalid base URL: %s' % base logging.warn(msg) raise FetchError(msg) url = _MakeUrl(base, filename, rev) logging.info('Fetching %s', url) try: result = urlfetch.fetch(url) except Exception, err: msg = 'Error fetching %s: %s: %s' % (url, err.__class__.__name__, err) logging.warn('FetchBase: %s', msg) raise FetchError(msg) if result.status_code != 200: msg = 'Error fetching %s: HTTP status %s' % (url, result.status_code) logging.warn('FetchBase: %s', msg) raise FetchError(msg) return models.Content(text=ToText(UnifyLinebreaks(result.content)), parent=patch) def _MakeUrl(base, filename, rev): """Helper for FetchBase() to construct the URL to fetch. Args: base: The base property of the Issue to which the Patch belongs. filename: The filename property of the Patch instance. rev: Revision number, or None for head revision. Returns: A URL referring to the given revision of the file. """ scheme, netloc, path, params, query, fragment = urlparse.urlparse(base) if netloc.endswith(".googlecode.com"):
def setUp(self):
    """Create a fresh Content instance before each test."""
    # presumably `t` is the module under test -- confirm against imports
    self.content = t.Content()
def import_data():
    """Import devices and content from uploaded CSV files into the database.

    Reads ``devices.csv`` and ``content.csv`` from the uploads directory
    (optionally under a sub-directory named by the ``import_source`` form
    field), validates every row against a schema, then upserts Device rows
    keyed by ``code`` and inserts Content rows.

    Returns:
        A JSON response with ``success`` and ``msg`` keys.
    """
    filepath = os.path.join(current_app.config.get('BASEDIR'), 'uploads')

    import_source = request.form.get('import_source')
    # BUG FIX: comparisons to None/False below used `==`; use identity
    # checks (`is None` / `is False`) per PEP 8.
    if import_source is not None:
        # Let's be OS independent! Maybe we switch on Windows server one day :)
        import_source = import_source.replace('/', os.sep)
        filepath = os.path.join(filepath, import_source)

    delimiter = request.form.get('csv_separator')
    if delimiter is None:
        delimiter = current_app.config.get('DEFAULT_CSV_SEPARATOR')
    if delimiter is None:
        delimiter = ','

    # ---- devices.csv ----------------------------------------------------
    schema = {
        'id': {'type': 'integer', 'required': True},
        'name': {'type': 'string', 'required': True, 'maxlength': 32},
        'description': {'type': 'string', 'required': False},
        'code': {'type': 'string', 'required': True, 'maxlength': 30},
        'status': {'type': 'string',
                   'allowed': ['enabled', 'disabled', 'deleted']}
    }
    # Maps CSV column index -> document field (column 4 is skipped).
    lookup_indexes = {
        0: {'name': 'id', 'convert_function': utility.int_convert},
        1: {'name': 'name'},
        2: {'name': 'description'},
        3: {'name': 'code'},
        5: {'name': 'status'}
    }
    devices_filepath = os.path.join(filepath, 'devices.csv')
    device_documents = utility.csv_file_parse_func(
        schema=schema,
        filepath=devices_filepath,
        lookup_indexes=lookup_indexes,
        delimiter=delimiter,
        logger=current_app.logger)
    if device_documents is False:
        # BUG FIX: message wrongly said "content csv file".
        current_app.logger.error(
            'Invalid configuration provided for parsing devices csv file, '
            'filepath: "%s"' % devices_filepath)
        return jsonify({
            'success': False,
            'msg': 'Invalid configuration provided for parsing devices csv file'
        })

    # ---- content.csv ----------------------------------------------------
    schema = {
        'name': {'type': 'string', 'required': True, 'maxlength': 32},
        'description': {'type': 'string', 'required': False},
        'device': {'type': 'integer', 'required': True},
        'expire_date': {'type': 'datetime'},
        'status': {'type': 'string',
                   'allowed': ['enabled', 'disabled', 'deleted']}
    }
    lookup_indexes = {
        1: {'name': 'name'},
        2: {'name': 'description'},
        3: {'name': 'device', 'convert_function': utility.int_convert},
        4: {'name': 'expire_date', 'convert_function': utility.date_convert},
        5: {'name': 'status'}
    }
    content_filepath = os.path.join(filepath, 'content.csv')
    content_documents = utility.csv_file_parse_func(
        schema=schema,
        filepath=content_filepath,
        lookup_indexes=lookup_indexes,
        delimiter=delimiter,
        logger=current_app.logger)
    if content_documents is False:
        current_app.logger.error(
            'Invalid configuration provided for parsing content csv file, '
            'filepath: "%s"' % content_filepath)
        return jsonify({
            'success': False,
            'msg': 'Invalid configuration provided for parsing content csv file'
        })

    def _now_iso():
        # Timestamps are stored as second-resolution UTC ISO-8601 strings.
        return datetime.datetime.utcnow().replace(microsecond=0).isoformat()

    try:
        # set instead of list: O(1) duplicate detection.
        found_codes = set()
        for device in device_documents:
            if device['code'] in found_codes:
                # BUG FIX: message referenced the content csv file/path;
                # duplicates are detected in the devices file.
                current_app.logger.error(
                    'Duplicate code: "%s" was found while parsing devices csv '
                    'file, filepath: "%s"'
                    % (device['code'], devices_filepath))
                continue
            found_codes.add(device['code'])

            # Upsert: update timestamps on an existing device, otherwise
            # create a new row with both timestamps set.
            existing_device = db_session.query(models.Device) \
                .filter(models.Device.code == device['code']) \
                .first()
            if existing_device:
                device = existing_device
                device.date_updated = _now_iso()
            else:
                device = models.Device(**device)
                current_time = _now_iso()
                device.date_created = current_time
                device.date_updated = current_time
            db_session.add(device)
        db_session.commit()

        for content in content_documents:
            content = models.Content(**content)
            current_time = _now_iso()
            content.date_created = current_time
            content.date_updated = current_time
            db_session.add(content)
        db_session.commit()

        output = {'success': True, 'msg': 'Successfully imported!'}
    except AssertionError as e:
        # Keep the session usable after a failed flush/commit.
        db_session.rollback()
        # BUG FIX: the original format string had no %s placeholder, so
        # this logging call itself raised TypeError.
        current_app.logger.error(
            'Data error! Cannot import csv files to database, error: %s'
            % str(e))
        output = {
            'success': False,
            'msg': 'There was an error during the operation! Please check CSV files that you are sending.'
        }
    except exc.SQLAlchemyError as e:
        db_session.rollback()
        current_app.logger.error(
            'Database error! Cannot import csv files to database, error: %s'
            % str(e))
        output = {'success': False, 'msg': 'Database error!'}

    return jsonify(output)