Example No. 1
def yield_rows(muid_strings, file_uid_mapping):
    # Header row: 'segment_id' followed by one column per muid string.
    fields = ['segment_id'] + muid_strings
    yield fields

    field_mapping = {field:i for i, field in enumerate(fields)}
    
    for file_num, (uid, file_mapping) in enumerate(sorted(file_uid_mapping.items(), key=bilarasortkey)):
        data = {}
        segment_ids = set()
        for muid_string in muid_strings:
            if muid_string in file_mapping:
                file = file_mapping[muid_string]
                
                try:
                    file_data = json_load(file)
                except json.decoder.JSONDecodeError:
                    # Report which file is malformed before aborting.
                    print(f'Invalid JSON in {file}')
                    exit(1)
                
                i = field_mapping[muid_string]
                for segment_id, value in file_data.items():
                    if segment_id not in data:
                        data[segment_id] = [segment_id] + [''] * (len(fields) - 1)
                    data[segment_id][i] = value
        
        for segment_id in sorted(data.keys(), key=bilarasortkey):
            yield data[segment_id]
        
        # Separate files with a blank spacer row (omitted after the last file).
        if file_num < len(file_uid_mapping) - 1:
            yield [''] * len(fields)
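A possible way to drive this generator, shown as a sketch only: the muid names, the file layout, and the helpers json_load and bilarasortkey used by yield_rows are assumptions taken from context, not part of the example.

# Hypothetical usage: stream the generated rows into a TSV export.
import csv
from pathlib import Path

muids = ['root-pli-ms', 'translation-en-sujato']  # assumed column names
file_uid_mapping = {
    # assumed layout: uid -> {muid string: path to its JSON file}
    'mn1': {muid: Path(muid) / 'mn1.json' for muid in muids},
}

with open('export.tsv', 'w', newline='') as f:
    writer = csv.writer(f, delimiter='\t')
    for row in yield_rows(muids, file_uid_mapping):
        writer.writerow(row)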
Example No. 2
	def do_add(self):
		ret_obj = {
			"return_value" : 1,
			"return_desc" : "add failed"
		}
		#add new instance
		if common.is_valid_json(self.para.data) != 0:
			loginfo = "invalid json string: " + self.para.data
			print(loginfo)
			ret_obj["return_value"] = 1
			return ret_obj

		# convert the string into a dict
		#ss = eval(self.para.data)
		ss = self.para.data

		# normalize the encoding by round-tripping through json dump/load
		json_str = common.json_dump(ss)
		data = common.json_load(json_str)

		data["_id"] = self.para.id;
		query = {"_id":self.para.id}
		if self.is_exists(query) == 1:
			ret = self.db_collection.insert(data)
			if ret == self.para.id:
				ret_obj["return_value"] = 0
				return ret_obj
			else:
				print("add failed")
				ret_obj["return_value"] = 1
				return ret_obj
		else:
			print("The record already exists, cannot add! id=" + data["_id"])
			ret_obj["return_value"] = 1
			return ret_obj
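For comparison, a minimal stand-alone sketch of the same guarded-insert pattern written directly against pymongo; the function name, return structure, and connection details are assumptions, not the class shown above.

# Hypothetical sketch: parse the JSON payload, refuse duplicates, then insert.
import json
from pymongo import MongoClient

def add_record(collection, record_id, json_str):
    try:
        data = json.loads(json_str)
    except ValueError:
        return {"return_value": 1, "return_desc": "invalid json string"}
    data["_id"] = record_id
    if collection.find_one({"_id": record_id}) is not None:
        return {"return_value": 1, "return_desc": "record already exists"}
    collection.insert_one(data)
    return {"return_value": 0, "return_desc": "add succeeded"}

# usage (assumed database and collection names):
# client = MongoClient()
# print(add_record(client["meta"]["instances"], "inst-001", '{"name": "demo"}'))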
Example No. 3
def init_software():
    # install software
    software = json_load(SOFTWARE_INFO)
    if software is None:
        logger.error("The software info is invalid format!")
        return 40

    logger.info("init: install software[%s]..", software)
    if software:
        p_conf = ACTION_PARAM_CONF[ACTION_SOFTWARE_INSTALL]["children"]["software"]
        if not ArgsParser.check("software", software, p_conf):
            logger.error("The software[%s] valid failed!", software)
            return 40
        return install(software, ignore_exist=True)
    return 0
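A small driver sketch, assuming only the convention visible above (0 on success, a non-zero code such as 40 on failure); the entry point itself is hypothetical.

# Hypothetical entry point that propagates init_software()'s return code.
import sys

def main():
    rc = init_software()
    if rc != 0:
        print(f"software initialization failed with code {rc}", file=sys.stderr)
    return rc

if __name__ == '__main__':
    sys.exit(main())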
Example No. 4
    def load(self):
        if not pathlib.Path(self.file_name).exists():
            # file doesn't exist, initialize object with default values
            logger.info(f'Initializing {self.file_name}')
            self.job_id = 1
            self.tables = dict()
        else:
            logger.info(f'Loading {self.file_name}')
            """
			with open(self.file_name, 'rb') as input_stream:
				obj = pickle.load(input_stream)
			"""
            obj = json_load(self.file_name)

            # load key attributes
            self.job_id = obj.job_id
            self.tables = obj.tables
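A hypothetical save() counterpart, sketched from the two attributes load() restores; the JSON layout is an assumption and would need to match whatever json_load reconstructs on the next load().

    def save(self):
        # Assumed counterpart to load(): persist job_id and tables as JSON.
        import json
        with open(self.file_name, 'w') as output_stream:
            json.dump({'job_id': self.job_id, 'tables': self.tables},
                      output_stream, indent=2)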
Example No. 5
	def do_set_meta_all(self):
		ret_obj = {
			"return_value" : 1,
			"return_desc" : "get-meta-attr failed"
		}

		# check that the data is a valid json string
		if common.is_valid_json(self.para.data) != 0:
			loginfo = "The data string is not valid json!"
			print(loginfo)
			self.log.info(loginfo)
			return ret_obj

		# character-encoding conversion: round-trip through json dump/load
		ss = common.json_dump(self.para.data)
		data_obj = common.json_load(ss)

		# check that _id is not present in the data and that the record exists
		if common.is_key_in_dict("_id", data_obj) == 0:
			loginfo = "the key _id must not be present in the data string"
			print(loginfo)
			self.log.info(loginfo)
			return ret_obj

		#override data_obj["_id"]
		data_obj["_id"] = self.para.id

		query = {"_id":self.para.id}
		if self.is_exists(query) != 0:
			loginfo = "The id \"%s\" doesnot in database" % (self.para.id)
			print loginfo
			self.log.info(loginfo)
			return ret_obj

		# This is somewhat risky: set-meta-all removes the record first and then inserts it
		self.db_collection.remove(query)
		# execute the insert
		ret = self.db_collection.insert(data_obj)
		if ret != self.para.id:
			print "update failed"
			return ret_obj
		else:
			ret_obj["return_value"] = 0
		return ret_obj
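The same whole-record replacement can be sketched with pymongo's replace_one, which avoids the window between the remove and the insert; the function and parameter names here are assumptions.

# Hypothetical sketch: atomically replace the document instead of remove + insert.
def set_meta_all(collection, record_id, data_obj):
    data_obj["_id"] = record_id
    result = collection.replace_one({"_id": record_id}, data_obj)
    return 0 if result.matched_count == 1 else 1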
Example No. 6
	def do_set_meta_attr(self):
		ret_obj = {
			"return_value" : 1,
			"return_desc" : "get-meta-attr failed"
		}

		query = {"_id":self.para.id}
		if self.is_exists(query) != 0:
			loginfo = "The id \"%s\" doesnot in database" % (self.para.id)
			print loginfo
			log.info(loginfo)
			return ret_obj
		if common.is_valid_json(self.para.value) == 0:
			# valid json: normalize the encoding by round-tripping through json dump/load
			ss = common.json_dump(self.para.value)
			data_obj = common.json_load(ss)
		else:
			# plain value: store it as-is
			data_obj = self.para.value
		# execute the update
		ret = self.db_collection.update(query,{"$set":{self.para.key:data_obj}})
		ret_obj["return_value"] = 0
		ret_obj["return_desc"] = "set meta attr success"
		return ret_obj
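The $set step above can also be sketched with pymongo's update_one, which reports whether the document was matched; the names below are assumptions.

# Hypothetical sketch: set a single attribute and check that the record was matched.
def set_meta_attr(collection, record_id, key, value):
    result = collection.update_one({"_id": record_id}, {"$set": {key: value}})
    return 0 if result.matched_count == 1 else 1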
Example No. 7
                        help='Spreadsheet file to import. CSV, TSV, ODS, XLS')
    parser.add_argument('--original', help='Old spreadsheet, used for updates')
    parser.add_argument('-q',
                        '--quiet',
                        action='store_true',
                        help='Do not display changes to files')
    args = parser.parse_args()

    original_rows = pyexcel.iget_records(file_name=args.original)
    new_rows = pyexcel.iget_records(file_name=args.file)

    segment_id_mapping = {}

    # zip pairs rows positionally, so both spreadsheets must list segments in the same order
    for old, new in zip(original_rows, new_rows):
        segment_id_mapping[old['segment_id']] = new['segment_id']

    for file in iter_json_files():
        data = json_load(file)

        new_data = {}
        changed = False

        for k, v in data.items():
            if k in segment_id_mapping:
                k = segment_id_mapping[k]
                changed = True
            new_data[k] = v
        if changed:
            print(f'Updated {file}')
            with file.open('w') as f:
                json.dump(new_data, f, ensure_ascii=False, indent=2)
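The remapping step can be illustrated in isolation; the segment ids and text below are made-up placeholders, not data from the example.

# Minimal sketch of the id remapping applied to one file's data, in memory.
segment_id_mapping = {'mn1:1.1': 'mn1:1.0'}  # old segment_id -> new segment_id
data = {'mn1:1.1': 'first segment text', 'mn1:1.2': 'second segment text'}

new_data = {segment_id_mapping.get(k, k): v for k, v in data.items()}
print(new_data)  # {'mn1:1.0': 'first segment text', 'mn1:1.2': 'second segment text'}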