def __str__(self):
    """Render this tag node as text.

    Components are split into Attribute instances (rendered inline on the
    tag line) and everything else (rendered on following lines).
    """
    # Partition components: key True -> Attribute instances, False -> others.
    grouped = Flow(self.components).GroupBy(isa(type=Attribute)).Unboxed()
    others = grouped.get(False)
    attributes = grouped.get(True)
    # Normalise missing groups to empty tuples.
    if not others:
        others = ()
    if not attributes:
        attributes = ()
    # `is_unque` — presumably "is unique" (helper defined elsewhere); when
    # attribute names repeat, merge same-named attributes into one
    # Attribute carrying the concatenation of all their configs.
    if not is_unque(map(lambda a: a.name, attributes)):
        render_attributes = defaultdict(list)
        for each in attributes:
            attribute = render_attributes[each.name]
            attribute.append(each.components)
        # sum(configs, ()) flattens the list of component tuples.
        attributes = tuple(
            Attribute(name, *sum(configs, ()))
            for name, configs in render_attributes.items())
    return Format.Tag.format(
        name=self.name,
        indent=Format.Indent * self.indent,
        # Nested conditional: render children, else a bare newline only
        # for shallow nodes (indent <= 1), else nothing.
        components='\n'.join(map(str, others))
        if others else '\n' if self.indent <= 1 else '',
        attributes=f' {" ".join(map(str, attributes))}' if attributes else '')
def test_other():
    """Drive a bounded MGenerator through Flow and check str/repr agree."""
    def successor_until_ten(e):
        # Successor function; signals exhaustion to MGenerator via
        # StopIteration once the next value would reach 10.
        nxt = e + 1
        if nxt >= 10:
            raise StopIteration
        return nxt

    res = Flow(MGenerator(successor_until_ten, 0)).Take(100).ToList()
    assert str(res) == repr(res)
def delete(strategy: Strategy):
    """Remove what `strategy` selects.

    When the strategy's `suffixes` sentinel is the builtin `all`, the whole
    directory is removed; otherwise every file under the directory whose
    name ends with one of the suffixes is removed individually.
    """
    wipe_whole_dir = strategy.suffixes is all
    if wipe_whole_dir:
        remover = partial(shutil.rmtree, ignore_errors=True)
        log(strategy.dir, operation='remove', then_call=remover)
        return
    matched = Flow(recur_listdir(strategy.dir)).Filter(
        endswith(strategy.suffixes))
    matched.Each(log.within(operation='remove', then_call=os.remove))
def gen_functions(files):
    """Collect generated code fragments from every source text in `files`.

    Scans each text for lines starting with '@extension_' and parses the
    line that FOLLOWS each marker — note the `i + 1` below pairs the
    marker's class value with the NEXT line's index.
    """
    generated = []
    for codes in files:
        sources = codes.split('\n')
        # Flow's Map/Filter auto-unpack tuple elements into multiple
        # lambda parameters (i, x) — presumably a linq.Flow feature;
        # None results are dropped by the Filter stages.
        Flow(sources) \
            .Enum() \
            .Map(lambda i, x: (i + 1, get_class_value(x)) if x.startswith('@extension_') else None) \
            .Filter(lambda x: x) \
            .Map(lambda i, value: parser(_parser(token(sources[i]), meta=MetaInfo(), partial=True) , value)) \
            .Filter(lambda x: x) \
            .Then(generated.extend)
    return '\n'.join(generated)
from .core.ParserC import *
from .__release_info__ import *
from Redy.Tools.PathLib import Path
import os
from linq import Flow

# Environment variable naming the directory that holds rbnf's library files.
RBNF_HOME = 'RBNF_HOME'

_root_dir = Path(__file__).parent()
home = Path(os.environ.get(RBNF_HOME, '~/.rbnf'))

if RBNF_HOME not in os.environ:
    # Export the resolved default so child processes and later lookups
    # agree on the same location.  (Fixed: use str(home) instead of the
    # unidiomatic home.__str__().)
    os.environ[RBNF_HOME] = str(home)

if not home.exists():
    # First run: copy the bundled rbnf libraries into the user's home dir.
    Flow(_root_dir.into('rbnf_libs').list_dir()).each(
        lambda it: it.move_to(home))
def test_GroupBy():
    """Exercise Flow.GroupBy with and without a key function."""
    pairs = Flow([(1, 2), (2, 3), (3, 2)])
    pairs.GroupBy(lambda a, b: a + b).ToTuple()
    ones = Flow([1, 1, 1])
    ones.GroupBy().ToList()
def test_Extend():
    """Flow.Extend should accept a plain sequence of pairs."""
    base = Flow([(1, 2), (2, 2), (3, 3)])
    base.Extend([(1, 2), (2, 2), (3, 3)])
def test_Reversed():
    """Smoke-test Flow.Reversed on a small pair list."""
    pairs = [(1, 2), (2, 2), (3, 3)]
    Flow(pairs).Reversed()
def call():
    """Rebind the module-level `seq` to a fresh natural-number flow and
    expose it inside `func`'s globals before invoking `func`."""
    global seq
    naturals = MGenerator(lambda x: x + 1, start_elem=0)  # [0..infinity)
    seq = Flow(naturals)
    func.__globals__['seq'] = seq
    func()
def test_example5():
    """ Example 5: """
    # Register ToTupleGenerator as an extension method for dict only.
    @extension_class(dict)
    def ToTupleGenerator(self: dict):
        return Flow(((k, v) for k, v in self.items())).ToTuple().Unboxed()
    try:
        # `seq` holds non-dict elements, so the dict-only extension
        # method is not found and an exception is expected here.
        seq.Take(10).ToTupleGenerator()
    except Exception as e:
        print(e.args)
    # The bare string below documents the expected failure; it is kept
    # verbatim because it is an (inert) expression statement.
    """ NameError: No extension method named `ToTupleGenerator` for builtins.object. """
    # ToDict() yields a dict, so the extension method resolves here.
    print(seq.Take(10).Zip(seq.Take(10)).ToDict().ToTupleGenerator())


test_example5()


# Register an extension method by class NAME rather than class object —
# 'generator' matches generator objects.
@my_test
def test_extension_byclsname():
    @extension_class_name('generator')
    def MyNext(self):
        return next(self)


test_extension_byclsname()
Flow((i for i in range(10))).MyNext()
def test_Each():
    """Each should invoke the callback once per (auto-unpacked) pair."""
    Flow([(1, 2), (2, 3), (3, 2)]).Each(lambda a, b: a + b)
def test_Filter():
    """Exercise Filter with a binary predicate, the identity default,
    and chaining down to All()."""
    Flow([(1, 2), (2, 3), (3, 2)]).Filter(lambda a, b: a + b)
    identical = Flow([(1, 1), (2, 2), (3, 2)]).Filter(lambda a, b: a is b)
    identical.Filter().Filter(lambda pair: pair != (3, 2)).All()
def test_Scan():
    """Scan folds left from a seed and keeps every intermediate value."""
    running_sum = lambda acc, cur: acc + cur
    Flow([2, 3, 5]).Scan(running_sum, 0).ToList()
def test_Sum():
    """Sum with a binary projection over pairs."""
    Flow([(1, 2), (2, 3), (3, 2)]).Sum(lambda left, right: left + right)
def test_Union():
    """Union of a set-backed flow with an overlapping pair list."""
    source = Flow({(1, 1), (2, 2), (3, 3)})
    source.Union([(1, 2), (2, 2), (3, 3)])
def test_Then():
    """Then should accept both unary and auto-unpacked callables."""
    Flow([(1, 2), (2, 3)]).Then(lambda whole: whole)
    Flow([(1, 2), (2, 3)]).Then(lambda a, b: a + b)
def test_Intersects():
    """Intersection keeps only elements present in both sequences."""
    lhs = Flow({(1, 1), (2, 2), (3, 3)})
    lhs.Intersects([(1, 2), (2, 2), (3, 3)])
def getitems(self):
    """Build one rendered item per node in the stored result."""
    render = self.generateitem
    nodes = self.result.nodes
    return Flow(nodes).Map(lambda node: render(node)).Unboxed()
def getitems(self): return Flow(self.data["features"]).Map( lambda n: self.generateitem(n)).Unboxed()
def test_Aggregate():
    """Aggregate applies several reducers to the same flow at once."""
    reducers = (max, min, sum)
    Flow([1, 2, 3, 4, 5]).Aggregate(*reducers).ToTuple()
def ToTupleGenerator(self: dict):
    """Stream the dict's (key, value) pairs through Flow and return the
    unboxed tuple of them."""
    pairs = ((key, value) for key, value in self.items())
    return Flow(pairs).ToTuple().Unboxed()
# Trinity / GraphEngine FFI bootstrap script.
# Import order is preserved: GraphEngine.ffi must load before Trinity.
# (Fixed: the duplicate second `import Trinity` was removed.)
import GraphEngine.ffi
import Trinity
import os
import clr
from linq import Flow
from Trinity.FFI import Agent

# Paths the agent needs: bundled ffi headers, local storage root, and the
# TSL code-generator executable shipped with GraphEngine.
IncludeDirectory = os.path.join(GraphEngine.__path__[0], 'ffi')
StorageRoot = os.path.abspath('storage')
TSLCodeGenExeLocation = os.path.join(GraphEngine.__path__[0], 'Command',
                                     'Trinity.TSL.CodeGen.exe')
DotNetExeLocation = 'dotnet.exe'

Agent.Configure(IncludeDirectory, StorageRoot, TSLCodeGenExeLocation,
                DotNetExeLocation, 10, 10, 10)
Trinity.Global.LocalStorage.LoadStorage()
Agent.Initialize()

# Only (re)generate and load the TSL extension when its dll is missing.
if not os.path.exists(r"storage\composite-helper\Trinity.Extension.abc.dll"):
    tsl_path = os.path.abspath('./tests/tsl')
    Agent.LoadTSL(*[tsl_path, tsl_path, "abc", None])

# Dump every cell descriptor's type name and field names for inspection.
Flow(Trinity.Global.StorageSchema.CellDescriptors).Each(lambda cell_desc: print(
    f'{cell_desc.TypeName}{list(cell_desc.GetFieldNames())}'))

Trinity.Global.LocalStorage.SaveStorage()
Agent.Uninitialize()
def test_Zip():
    """Zip two equal-length pair lists."""
    left = [(1, 2), (2, 3), (3, 2)]
    right = [(1, 2), (2, 2), (3, 3)]
    Flow(left).Zip(right)
def test_Sort():
    """In-place Sort keyed by the pair sum."""
    pairs = [(1, 2), (2, 2), (3, 3)]
    Flow(pairs).Sort(lambda a, b: a + b)
def test_Sorted():
    """Sorted with a binary key, then the default and keyword `by` forms."""
    Flow([(1, 2), (2, 3), (3, 2)]).Sorted(lambda a, b: a + b)
    ascending = Flow([1, 2, 3]).Sorted()
    ascending.Sorted(by=lambda value: -value)
def test_ArgSorted():
    """ArgSorted yields indices; cover binary key, default, and `by=`."""
    Flow([(1, 2), (2, 3), (3, 2)]).ArgSorted(lambda a, b: a + b)
    Flow([3, 2, 1]).ArgSorted()
    indices = Flow([(1, 1), (2, 2), (3, 1)]).ArgSorted(by=lambda a, b: a * b)
    indices.ToList()
}]
# NOTE(review): the `}]` above closes a structure whose opening lies before
# this chunk — it is left untouched.
# Build a Strategy per spec dict and hand each to `delete`; the builtin
# `all` is used as a sentinel meaning "remove the whole directory".
(Flow(
    map(Strategy.new, [
        *additional, {
            "./GraphEngine/ffi": suffixes1 + suffixes2 + suffixes3 + ['.sig']
        }, {
            "./GraphEngine.egg-info": all
        }, {
            "./__pycache__": all
        }, {
            "./build": all
        }, {
            "./dist": all
        }, {
            'storage': all
        }, {
            "GraphEngine/ffi/storage": all
        }, {
            'cache': all
        }, {
            'GraphEngine/ffi/A': all
        }, {
            'GraphEngine/ffi/B': all
        }, {
            'GraphEngine/ffi/composite-helper': all
        }, {
            'GraphEngine/ffi/write_ahead_log': all
        }
    ])).Each(delete))
def test_Group():
    """Group by a binary key, then a chained Group/Map/Group round trip."""
    Flow([(1, 2), (2, 3), (3, 2)]).Group(lambda a, b: a + b).ToTuple()
    sized = Flow([1, 1, 2, 3, 3]).Group().Map(
        lambda bucket: (len(bucket), len(bucket)))
    sized.Group(lambda a, b: a * b).ToTuple()
# Training configuration.
train_data_size = 500
test_data_size = 100
epochs = 115
batch_group_num = 3
lr = 0.01
# NOTE(review): `size_average=False` is the deprecated spelling of
# reduction='sum' in newer torch versions — left as written; confirm the
# installed torch still accepts it.
loss_fn = torch.nn.MSELoss(size_average=False)


def to_batch(image):
    """Split one group into (X, y) batches.

    `image` is (target, sample1, sample2, ...); samples are stacked as X
    and the target is repeated so X and y align element-wise.
    """
    target, *samples = image
    return (
        np.stack(samples),  # X
        np.stack([target] * len(samples)))


raw_sources = Flow(os.listdir(train_dir))


def DataIOStream(raw_src: Flow, num: int):
    """Stream `num` file names into batched image tensors.

    Assumes each name X.jpg in train_dir has counterparts X_1.jpg and
    X_2.jpg in test_dir — presumably noisy variants; TODO confirm against
    the data layout.
    """
    return (raw_src.Take(num).Filter(
        lambda x: x.endswith('.jpg'))  # select jpg files
            .Map(lambda x: [os.path.join(train_dir, x)] + [
                os.path.join(test_dir, x[:-4] + "_" + str(i) + '.jpg')
                for i in range(1, 3)
            ])  # merge the noisy data paths with the real data path
            .Map(lambda img_file_names: list(
                map(
                    and_then(
                        data.imread,  # read the image
                        img_as_float),  # float tensor, [0, 255] -> [0, 1]
                    img_file_names))).Map(to_batch))
def test_Next():
    """Next pulls the first element from a generator-backed flow."""
    gen = (i for i in range(3))
    Flow(gen).Next()