def __init__(self, hdfs_file, input_type, output_type, name=None):
    # Only shared objects (.so) and LLVM IR (.ll) are accepted.
    if not hdfs_file.endswith(('.so', '.ll')):
        raise ValueError('Invalid file type. Must be .so or .ll')
    self.hdfs_file = hdfs_file

    inputs = [ir._validate_type(x) for x in input_type]
    output = ir._validate_type(output_type)

    # When no name is given, derive one from the symbol plus the input
    # signature so that different signatures hash to different names.
    # self.so_symbol is assumed to be set elsewhere (e.g. by a subclass);
    # sha1 requires bytes on Python 3, hence the encode.
    new_name = name
    if not name:
        string = self.so_symbol
        for in_type in inputs:
            string += in_type
        new_name = sha1(string.encode('utf-8')).hexdigest()

    UDFInfo.__init__(self, inputs, output, new_name)
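# --- Example (hypothetical helper, not part of the code above) ---
# A minimal, self-contained sketch of the default-naming scheme,
# assuming the validated input types are plain strings like 'int32':

from hashlib import sha1

def default_udf_name(so_symbol, input_types):
    # SHA-1 of the symbol plus the input signature, so the same symbol
    # registered with different signatures gets a distinct name.
    string = so_symbol + ''.join(input_types)
    return sha1(string.encode('utf-8')).hexdigest()

print(default_udf_name('MyUdf', ['int32', 'int32']))
print(default_udf_name('MyUdf', ['string']))  # different digest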
def _validate(self, args, i):
    arg = args[i]
    # Type names given as strings are case-insensitive: lowercase
    # before validation, then write the validated type back into args.
    if isinstance(arg, py_string):
        arg = arg.lower()
    arg = args[i] = ir._validate_type(arg)
    return arg
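# --- Example (stand-ins, not the real ibis validator) ---
# A minimal sketch of the normalize-then-write-back pattern, using a
# hypothetical whitelist in place of ir._validate_type:

_VALID_TYPES = {'int8', 'int16', 'int32', 'int64',
                'float', 'double', 'string', 'boolean'}

def _validate_type(t):
    if t not in _VALID_TYPES:
        raise ValueError('Invalid type: {!r}'.format(t))
    return t

args = ['INT32', 'string']
args[0] = _validate_type(args[0].lower())  # lowercase, validate, write back
print(args)  # ['int32', 'string']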
def __init__(self, names, types):
    from ibis.expr.types import _validate_type

    if not isinstance(names, list):
        names = list(names)

    self.names = names
    self.types = [_validate_type(x) for x in types]

    # Map each column name to its position; since dict keys are
    # unique, a shorter mapping means a name appeared twice.
    self._name_locs = dict((v, i) for i, v in enumerate(self.names))
    if len(self._name_locs) < len(self.names):
        raise com.IntegrityError('Duplicate column names')
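# --- Example (standalone illustration) ---
# The duplicate check relies on dict keys being unique: mapping each
# name to its position collapses repeats, so a shorter dict signals
# a duplicate column name.

names = ['a', 'b', 'a']
name_locs = {v: i for i, v in enumerate(names)}
print(len(name_locs) < len(names))  # True: 'a' appears twice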
def _operation_type_conversion(inputs, output):
    # Validate the declared types, then build one value rule per
    # input and a single output rule shaped like the flat arguments.
    in_type = [ir._validate_type(x) for x in inputs]
    in_values = [rules.value_typed_as(_convert_types(x)) for x in in_type]

    out_type = ir._validate_type(output)
    out_value = rules.shape_like_flatargs(out_type)

    return in_values, out_value
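# --- Example (toy stand-ins for the rules module) ---
# A sketch of the same shape under assumed semantics: one validator
# per declared input, plus a single rule for the output. The helper
# names below are hypothetical, not ibis APIs.

def value_typed_as(t):
    def check(v):
        print('validating', v, 'as', t)
        return v
    return check

def operation_type_conversion(inputs, output):
    in_values = [value_typed_as(t) for t in inputs]
    out_value = value_typed_as(output)
    return in_values, out_value

in_rules, out_rule = operation_type_conversion(['int32', 'double'], 'double')
in_rules[0](7)   # validating 7 as int32
out_rule(1.5)    # validating 1.5 as double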