def csv_json(id_one, id_two, rows, data):
    """Partition parsed CSV rows into accepted rows and per-row errors.

    Parameters:
        id_one, id_two: unused here — kept for interface compatibility
            (presumably consumed by sibling handlers; TODO confirm).
        rows: iterable of (row, row_number, row_errors) triples.
        data: unused here — kept for interface compatibility.

    Returns:
        rest.error(errors_by_row_number) if any row had errors,
        otherwise rest.created(list_of_valid_rows).
    """
    # BUG FIX: the loop target was also named `errors`, shadowing the
    # accumulator dict — each iteration rebound `errors` to the tuple's
    # third element, so nothing was ever collected into the dict and the
    # final `if errors:` tested the last row's error value instead.
    errors = {}
    response = []
    for row, row_number, row_errors in rows:
        if row_errors:
            errors[row_number] = row_errors
        else:
            response.append(row)
    if errors:
        return rest.error(errors)
    else:
        return rest.created(response)
def PUT(self, request):
    """Handle an HTTP PUT and acknowledge whether the request had no body."""
    # NOTE(review): responds "created" with a boolean flag that is True
    # exactly when the request carried no body — confirm this inversion
    # is intentional against the API contract.
    body_is_empty = request.body is None
    return rest.created(body_is_empty)
def csv_with_fieldnames(rows):
    """Record the dog type of each parsed row, then acknowledge creation."""
    # NOTE(review): `self` is a free variable here (closure capture or a
    # missing parameter) — confirm against the enclosing scope.
    self.seen_dog_names.extend(schema.dog_type.get() for schema in rows)
    return rest.created({'csv': 'created'})
def csv_with_fieldnames(rows):
    """Append each row's dog type to the shared seen list and confirm."""
    # NOTE(review): this re-defines csv_with_fieldnames (an earlier,
    # near-identical definition exists); in Python the later binding wins
    # — confirm which definition is actually intended to survive.
    # NOTE(review): `self` is a free variable here — confirm it is
    # closure-captured from the enclosing scope.
    for parsed_row in rows:
        self.seen_dog_names.append(parsed_row.dog_type.get())
    return rest.created({"csv": "created"})
def post(self):
    """Create a new project from a JSON payload.

    Expects a JSON body with 'project_name' (1-255 chars, must not already
    exist) and a non-empty 'sources' list. On success, creates the project
    directory and config file, registers a domain search service, stores the
    project in MongoDB, and returns rest.created(...). Returns
    rest.bad_request / rest.ok / rest.internal_error on the other paths.
    """
    # NOTE(review): `input` shadows the builtin; left unchanged here.
    input = request.get_json(force=True)
    logger.info('/projects received: %s' % (input))
    project_name = input.get('project_name', '')
    # Reject empty or overlong names (filesystem / service name limits).
    if len(project_name) == 0 or len(project_name) >= 256:
        return rest.bad_request('Invalid project name.')
    if project_name in data['projects']:
        #msg = 'You cannot post an existing project to the /projects endpoint. For updates, post to projects/{your_project_name}'
        msg = 'You cannot post an existing project to the /projects endpoint.'
        return rest.bad_request(msg)
    project_sources = input.get('sources', [])
    if len(project_sources) == 0:
        return rest.bad_request('Invalid sources.')
    logger.info('/projects project_name: %s' % (project_name))
    logger.info('/projects project_sources: %s' % (project_sources))
    try:
        # create project data structure, folders & files
        project_dir_path = _get_project_dir_path(project_name)
        # Per-project lock held for the whole creation; released in the
        # outer finally below.
        project_lock.acquire(project_name)
        logger.info('/projects creating directory: %s' % (project_dir_path))
        os.makedirs(project_dir_path)
        # Register the project in the in-memory registry, then persist its
        # config to disk.
        data['projects'][project_name] = {'sources': {}}
        data['projects'][project_name]['project_name'] = project_name
        data['projects'][project_name]['sources'] = project_sources
        with open(os.path.join(project_dir_path, 'project_config.json'), 'w') as f:
            f.write(json.dumps(data['projects'][project_name], indent=4, default=json_encode))
        # we should try to create a service for domain "sources:type"
        # (or update it if timerange defined by "sources:start_date" and "sources:end_date" is bigger than existing)
        # ret: 0 = created, 1 = domain already existed, other = failure
        # (inferred from the branches below — TODO confirm against
        # check_domain_service's contract).
        ret, domain_name, ingestion_id, job_id, err = check_domain_service(project_sources, project_name)
        data['projects'][project_name]['domain'] = domain_name
        if ret==0:
            msg = 'project %s created.' % project_name
            logger.info(msg)
            # store job infos
            data['projects'][project_name]['ingestion_id'] = ingestion_id
            data['projects'][project_name]['job_id'] = job_id
            data['projects'][project_name]['status'] = 'indexing'
            # insert into mongoDB
            logger.info('Project %s (before mongodb insertion) dict keys are %s' % (project_name, data['projects'][project_name].keys()))
            db_projects.insert_one(data['projects'][project_name])
            logger.info('Project %s (after mongodb insertion) dict keys are %s' % (project_name, data['projects'][project_name].keys()))
            # How come data['projects'][project_name] has an '_id' field now???
            # (pymongo insert_one mutates the passed dict by adding '_id';
            # it is stripped here so the in-memory copy stays JSON-clean.)
            if '_id' in data['projects'][project_name]:
                del data['projects'][project_name]['_id']
            logger.info('Project %s (after mongodb insertion and cleaning) dict keys are %s' % (project_name, data['projects'][project_name].keys()))
            # try/finally so the response is computed before the restart,
            # but apache is restarted regardless.
            try:
                return rest.created(msg)
            finally:
                restart_apache()
        elif ret==1:
            msg = 'domain for project %s was already previously created. %s' % (project_name, err)
            logger.info(msg)
            # what should we return in this case
            return rest.ok(msg)
        else:
            # we should remove project_name
            del data['projects'][project_name]
            msg = 'project %s creation failed while creating search service: %s' % (project_name, err)
            logger.info(msg)
            return rest.internal_error(msg)
    except Exception as e:
        # Best-effort rollback: remove the registry entry and any files
        # created before the failure; each step may independently fail.
        # try to remove project_name
        try:
            del data['projects'][project_name]
        except:
            pass
        # try to remove data files too
        try:
            shutil.rmtree(os.path.join(_get_project_dir_path(project_name)))
        except:
            pass
        msg = 'project {} creation failed: {} {}'.format(project_name, e, sys.exc_info()[0])
        logger.error(msg)
        return rest.internal_error(msg)
    finally:
        # NOTE(review): release runs even if acquire() itself raised —
        # confirm project_lock.release tolerates that.
        project_lock.release(project_name)