Example 1
def neighbours_to_svg(Prg, NeighBours, Spirals, Fname="svg_neighbours.html"):
    SvgObj = svg.obj_new()
    for CoordSpiralCenter, CoordsConnected in NeighBours.items():
        for CoordConnected in CoordsConnected:
            # print(Fname, "Debug:", CoordSpiralCenter, CoordConnected)
            SpiralLen = len(Spirals[CoordSpiralCenter])
            # Dash = str(SpiralLen) + "," + str(SpiralLen)
            svg.line(SvgObj,
                     CoordSpiralCenter,
                     CoordConnected,
                     StrokeWidth=SpiralLen,
                     HalfLine=True)

    # the dot has to cover the lines
    for CoordSpiralCenter, CoordsConnected in NeighBours.items():
        svg.dot(SvgObj, CoordSpiralCenter, R=len(Spirals[CoordSpiralCenter]))
        svg.text(SvgObj,
                 CoordSpiralCenter,
                 str(CoordSpiralCenter),
                 Color="green",
                 ShiftXAbs=-20)

    SvgSrc = svg.pack(SvgObj)
    # print(SvgSrc)
    util.file_write(Prg, Fname=Fname, Content=SvgSrc)
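
A note on the svg module used here: its implementation is not part of these examples. As a rough, hypothetical sketch (every name and keyword argument below is inferred from the calls above, not taken from the real module), the helpers could look like this:

# Hypothetical minimal svg helper compatible with the calls above.
# Coordinates are assumed to be (x, y) tuples.
def obj_new():
    return []  # collected SVG element strings

def line(Obj, CoordA, CoordB, StrokeWidth=1, HalfLine=False):
    (X1, Y1), (X2, Y2) = CoordA, CoordB
    if HalfLine:  # draw only up to the midpoint between the two points
        X2, Y2 = (X1 + X2) / 2, (Y1 + Y2) / 2
    Obj.append(f'<line x1="{X1}" y1="{Y1}" x2="{X2}" y2="{Y2}" '
               f'stroke="black" stroke-width="{StrokeWidth}" />')

def dot(Obj, Coord, R=3):
    Obj.append(f'<circle cx="{Coord[0]}" cy="{Coord[1]}" r="{R}" />')

def text(Obj, Coord, Txt, Color="black", ShiftXAbs=0):
    Obj.append(f'<text x="{Coord[0] + ShiftXAbs}" y="{Coord[1]}" '
               f'fill="{Color}">{Txt}</text>')

def pack(Obj):
    return '<svg xmlns="http://www.w3.org/2000/svg">' + "".join(Obj) + "</svg>"
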
Example 2
	def read(self, raw, offset):
		self.data = dict()

		self.data["terrain_border"] = list()
		for i in range(16):
			t = TerrainBorder()
			offset = t.read(raw, offset)
			self.data["terrain_border"] += [t.data]

		#int8_t zero[28];
		#uint16_t terrain_count_additional;
		zero_terrain_count_struct = Struct(endianness + "28c H")
		pc = zero_terrain_count_struct.unpack_from(raw, offset)
		offset += zero_terrain_count_struct.size

		self.data["terrain_count_additional"] = pc[28]

		tmp_struct = Struct(endianness + "12722s")
		t = tmp_struct.unpack_from(raw, offset)
		offset_begin = offset
		offset += tmp_struct.size

		fname = 'raw/terrain_render_data_%d_to_%d.raw' % (offset_begin, offset)
		filename = file_get_path(fname, write=True)
		file_write(filename, t[0])

		return offset
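
The struct format "28c H" used above unpacks into a 29-element tuple: indices 0-27 hold the 28 single-byte c fields and index 28 holds the uint16, which is why pc[28] is terrain_count_additional. A small self-contained check with the standard struct module (the endianness value here is just an example):

from struct import Struct

endianness = "<"  # little-endian, as an example
buf = bytes(28) + (1234).to_bytes(2, "little")
zero_terrain_count_struct = Struct(endianness + "28c H")
pc = zero_terrain_count_struct.unpack_from(buf, 0)
assert len(pc) == 29     # 28 single chars + one uint16
assert pc[28] == 1234    # the trailing H field
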
Example 3
def difference_display(Prg, SelfObj, MarksNowDetected, TestWantedResults, AppendToFileIfDifference=None):
    print("Num of Marks now detected: ", len(MarksNowDetected.keys()))
    print("Num of wanted results: ", len( TestWantedResults))

    WantedKeys = sorted(list(TestWantedResults.keys()))

    for Key in sorted(list(MarksNowDetected.keys())):

        # the MarksNowDetected dict() can have index gaps,
        # example: D = {0: "A", 3: "B", 5: "C"},
        # so we loop over the detected keys and always take the next element from the test -
        # the keys of the two dicts can differ, but their order is fixed
        MarkDetected = mark_util.mark_to_string(MarksNowDetected[Key])

        MarkWanted = TestWantedResults.get(WantedKeys.pop(0), "Key not in Wanted results: " + str(Key))

        if MarkDetected != MarkWanted:

            if AppendToFileIfDifference:
                util.file_append(Prg, os.path.join(*AppendToFileIfDifference), "\n\n" + MarkDetected)
            else:
                PathDetected = os.path.join(Prg["DirTmpPath"], "test_detected_"+str(Key)+".txt")
                PathWanted   = os.path.join(Prg["DirTmpPath"], "test_wanted_"+str(Key)+".txt")
                util.file_write(Prg, PathDetected, MarkDetected)
                util.file_write(Prg, PathWanted, MarkWanted)
                # theoretically all tests have been ok in released versions; this case happens only during development
                print("Dev message: test comparing with vimdiff:")
                os.system("vimdiff " + PathDetected + " " + PathWanted)

                SelfObj.assertEqual(MarkDetected, MarkWanted)
Example 4
def write_feed(posts):
    feedfile = os.path.join("..", "www", "atom.xml")
    feed = atom_feed(posts)
    feedtxt = feed.writeString("utf-8")
    #fo = codecs.open(feedfile, encoding='utf-8', mode="w")
    #feed.write(fo, "utf-8")
    #fo.close()
    util.file_write(feedfile, feedtxt)
Example 5
def write_feed(posts):
  feedfile = os.path.join("..", "www", "atom.xml")
  feed = atom_feed(posts)
  feedtxt = feed.writeString("utf-8")
  #fo = codecs.open(feedfile, encoding='utf-8', mode="w")
  #feed.write(fo, "utf-8")
  #fo.close()
  util.file_write(feedfile, feedtxt)
Example 6
    def test_util_file_read_lines(self):
        if self._test_exec("test_file_read_lines"):
            Prg = self.Prg
            Fname = os.path.join(Prg["DirWork"], "test_file_read_lines.txt")
            util.file_write(Prg, Fname=Fname, Content="cat\ndog\nelephant")
            Lines = util.file_read_lines(Prg, Fname, Strip=True)
            self.assertEqual(Lines, ["cat", "dog", "elephant"])
            util.file_del(Fname)
Example 7
    def test_collect_docs_from_working_dir(self):
        if self._test_exec("test_collect_docs_from_working_dir"):
            Prg = self.Prg

            FileName = "test_file_document_example.txt"
            FilePath = os.path.join(Prg["DirDocuments"], FileName)
            util.file_del(FilePath)
            util.file_write(Prg, Fname=FilePath, Content="example text")
            DocumentsAvailable = document.document_objects_collect_from_dir_documents(
                Prg)

            self.assertIn(util.filename_without_extension(FileName),
                          DocumentsAvailable)
            util.file_del(FilePath)
Example 8
def path_in_char_to_svg(Prg, Paths, Spirals, Fname="svg_paths_in_char.html"):
    SvgObj = svg.obj_new()

    # the dot has to cover the lines
    print("")
    print("Paths:", Paths)
    for Path in Paths:
        SpiralPrev = None
        for Spiral in Path:
            if SpiralPrev:
                svg.line(SvgObj, Spiral, SpiralPrev, StrokeWidth=5)

            print("path in char, svg, Spiral: ", Spiral)
            svg.dot(SvgObj, Spiral, R=len(Spirals[Spiral]))
            svg.text(SvgObj, Spiral, str(Spiral), Color="green", ShiftXAbs=-20)
            SpiralPrev = Spiral

    SvgSrc = svg.pack(SvgObj)
    # print(SvgSrc)
    util.file_write(Prg, Fname=Fname, Content=SvgSrc)
Example 9
def thread_function(ThreadName):
    global ReqCounter

    for i in range(1, MaxI):
        ReqCounter[ThreadName] += 1
        TimeStart = time.time()
        Result = util.web_get(f"{HostPort}/seek?words=looks,like,bird",
                              Verbose=False)
        #print(Result)
        TimeDelta = time.time() - TimeStart

        ReqTempo = int(1.0 / TimeDelta)
        Info = f"{i} {ThreadName} len: {len(Result)}, Tempo {ReqTempo}, req/sec\n"
        util.file_write(dict(),
                        Fname="test_performance.txt",
                        Content=Info,
                        Mode="a",
                        LogCreate=False)
        print(Info)
        time.sleep(5)
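
A side note on the timing above: time.perf_counter() is generally preferred over time.time() for measuring short durations, since it is monotonic and higher-resolution. A minimal sketch of the same req/sec estimate (the sleep stands in for the measured request):

import time

TimeStart = time.perf_counter()
time.sleep(0.05)                             # placeholder for the measured request
TimeDelta = time.perf_counter() - TimeStart
print(f"Tempo: {int(1.0 / TimeDelta)} req/sec (single-thread estimate)")
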
Example 10
    def redo(self):
        a = self.actor

        ## if php helper process not running, spawn it
        if a.php_helper is None or a.php_helper.poll() is not None:
            a.php_helper_in = "/tmp/retro/.db_helper.in.%d" % a.pid
            a.php_helper_out = "/tmp/retro/.db_helper.out.%d" % a.pid
            env = {'PIPE_IN': a.php_helper_in, 'PIPE_OUT': a.php_helper_out}
            util.mkfifo(a.php_helper_in)
            util.mkfifo(a.php_helper_out)
            a.php_helper = subprocess.Popen(
                [thisdir + '/../test/webserv/db/db_helper.php'], env=env)

        ## send function request to php helper via pipe
        util.file_write(a.php_helper_in, self.fn_args() + '\n')

        ## read response from php helper and return it in retnode
        ## XXX: check that response is not error
        self.retnode.data = util.file_read(a.php_helper_out).strip()

        a.wait_for_dbcall(self)
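
The util.mkfifo / util.file_write / util.file_read calls above presumably wrap named-pipe primitives from the standard library. A rough sketch of the same request/response round trip with os.mkfifo and plain file I/O (paths and payload are throwaway examples, and the PHP helper is simulated by a thread):

import os
import tempfile
import threading

pipe = os.path.join(tempfile.mkdtemp(), "helper.fifo")
os.mkfifo(pipe)

def fake_helper():
    # opening a FIFO for reading blocks until a writer connects
    with open(pipe) as f:
        request = f.read()
    print("helper got:", request.strip())

t = threading.Thread(target=fake_helper)
t.start()

# roughly what util.file_write(a.php_helper_in, ...) does
with open(pipe, "w") as f:
    f.write("pg_query 42 []\n")
t.join()
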
Example 11
    def wait_for_dbcall(self, action):
        if self.run is None:
            return

        ## return response to prev db func call
        if isinstance(action, DbFnCall):
            util.file_write(self.php_out, action.retnode.data)

        rd = PipeReader(self.php_in)
        rd.start()
        while self.run.poll() is None and rd.data is None:
            time.sleep(0.05)

        if self.run.poll() is not None:
            ## redo done for this php actor; cancel remaining actions
            for a in [x for x in self.actions if x > action]:
                if not isinstance(a, PhpExit):
                    a.cancel = True
            rd.kill()
            return

        rd.join()
        fn, fid, args = rd.data.strip().split(':')
        print "PhpActor: wait_for_dbcall: got request:", fn, fid, args

        ## if next action is the same as the requested action, update
        ## its args. else, create a new action
        nextact = min([x for x in self.actions if x > action])
        if isinstance(nextact, DbFnCall) and nextact.fn == fn:
            ts = max([x for x in nextact.argsnode.checkpts if x < nextact]).tac
            u = UpdateBufAction(util.between(ts, nextact.tic), args,
                                nextact.argsnode)
            u.connect()
        else:
            ts = util.between(action.tac, nextact.tic)
            name = (self.pid, fn, time.time())
            argsnode = mgrutil.BufferNode(name + ('fncall', 'args'), ts, args)
            retnode = mgrutil.BufferNode(name + ('fncall', 'ret'), ts, None)
            fncall = DbFnCall(fn, fid, self, argsnode, retnode)
            connect(fncall, ts + (1, ), ts + (2, ))
Example 12
    def fn_args(self):
        if self.fn != 'pg_query':
            return self.fn + ' ' + self.argsnode.data

        ## for SELECTs, add end_ts='infinity' to the query
        ##
        ## for INSERT/UPDATE/DELETE, write the current ts to a
        ## file that timetravel trigger uses as the update time
        args = []
        for a in json.loads(urllib.unquote(self.argsnode.data)):
            if a.startswith('SELECT'):
                args.append(a.replace("WHERE ",
                                      "WHERE end_ts='infinity' AND "))
            else:
                args.append(a)
            if a.startswith('INSERT') or a.startswith(
                    'UPDATE') or a.startswith('DELETE'):
                util.file_write('/tmp/retro/retro_rerun', '')
                ## XXX: should this be pid of current php process instead of self.actor?
                util.file_write('/tmp/retro/tt_params',
                                str(self.tic[0]) + " " + str(self.actor.pid))
        return self.fn + ' ' + urllib.quote(json.dumps(args))
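
The urllib.quote / urllib.unquote calls above are Python 2 spellings; in Python 3 the same functions live in urllib.parse. A tiny equivalent of the decode-modify-encode cycle used by fn_args:

import json
from urllib.parse import quote, unquote

encoded = quote(json.dumps(["SELECT * FROM t WHERE id=1"]))
args = json.loads(unquote(encoded))       # what the loop above iterates over
assert args == ["SELECT * FROM t WHERE id=1"]
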
Example 13
    def __init__(self, fname):
        self.fname = fname
        dbg("reading empires2_x1_p1 from %s..." % fname, 1)

        fname = file_get_path(fname, write=False)
        f = file_open(fname, binary=True, write=False)

        dbg("decompressing data from %s" % fname, 1)

        compressed_data = f.read()
        # decompress content with zlib (note the magic -15)
        # -15: the minus sign means there is no zlib header, 15 is the max window size
        self.content = zlib.decompress(compressed_data, -15)
        f.close()

        compressed_size = len(compressed_data)
        decompressed_size = len(self.content)

        del compressed_data

        dbg("length of compressed data: %d = %d kB" % (compressed_size, compressed_size / 1024), 1)
        dbg("length of decompressed data: %d = %d kB" % (decompressed_size, decompressed_size / 1024), 1)

        from util import file_write

        print("saving uncompressed dat file...")
        file_write(file_get_path("info/empires2x1p1.raw", write=True), self.content)

        # the main data storage
        self.data = dict()

        offset = 0
        offset = self.read(self.content, offset)

        dbg(
            "finished reading empires*.dat at %d of %d bytes (%f%%)."
            % (offset, decompressed_size, 100 * (offset / decompressed_size)),
            1,
        )
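
About the "magic -15": a negative wbits value tells zlib the stream is raw deflate data with no zlib header or checksum, and its magnitude (15) is the maximum window size. A quick standard-library round trip illustrating that mode:

import zlib

payload = b"some .dat content" * 100

comp = zlib.compressobj(wbits=-15)             # produce a raw deflate stream
raw_deflate = comp.compress(payload) + comp.flush()

# the same call the constructor above uses to decompress the .dat file
assert zlib.decompress(raw_deflate, -15) == payload
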
Example 14
    def kill(self):
        self.join(0.1)
        if self.isAlive():
            print "PipeReader(%s) is blocked. Waking it up.." % self.pn
            util.file_write(self.pn, "\n")
            self.join()
Example 15
def main():

	args = parse_args()

	#set verbose value in util
	set_verbosity(args.verbose)

	#assume that all files should be extracted when nothing is specified.
	if args.extract == []:
		args.extract.append('*:*.*')

	extraction_rules = [ ExtractionRule(e) for e in args.extract ]

	merge_images = not args.nomerge
	exec_dev = args.development

	#set path in utility class
	dbg("setting age2 input directory to " + args.srcdir, 1)
	set_read_dir(args.srcdir)

	#write mode is disabled by default, unless destdir is set
	if args.destdir != '/dev/null' and not args.listfiles and not args.dumpfilelist:
		dbg("setting write dir to " + args.destdir, 1)
		set_write_dir(args.destdir)
		write_enabled = True
	else:
		write_enabled = False


	drsfiles = {
		"graphics":  DRS("Data/graphics.drs"),
		"interface": DRS("Data/interfac.drs"),
		"sounds0":   DRS("Data/sounds.drs"),
		"sounds1":   DRS("Data/sounds_x1.drs"),
		"gamedata0": DRS("Data/gamedata.drs"),
		"gamedata1": DRS("Data/gamedata_x1.drs"),
		"gamedata2": DRS("Data/gamedata_x1_p1.drs"),
		"terrain":   DRS("Data/terrain.drs")
	}

	palette = ColorTable(drsfiles["interface"].get_file_data('bin', 50500))

	if exec_dev:
		if write_enabled:
			print("no indev function available at the moment.")
			return
		else:
			raise Exception("development mode requires write access")

	if write_enabled:
		file_write(file_get_path('processed/player_color_palette.pal', write=True), palette.gen_player_color_palette())

		import blendomatic
		blend_data = blendomatic.Blendomatic("Data/blendomatic.dat")

		for (modeidx, png, size, metadata) in blend_data.draw_alpha_frames_merged():
			fname = 'alphamask/mode%02d' % (modeidx)
			filename = file_get_path(fname, write=True)
			file_write(filename + ".png", png)
			file_write(filename + ".docx", metadata)
			dbg("blending mode%02d -> saved packed atlas" % (modeidx), 1)

		import gamedata.empiresdat
		datfile = gamedata.empiresdat.Empires2X1P1("Data/empires2_x1_p1.dat")
		filename = file_get_path("processed/terrain_meta.docx", write=True)

		tmeta = "#terrain specification\n"
		tmeta += "#idx=terrain_id, slp_id, sound_id, blend_mode, blend_priority, angle_count, frame_count, terrain_dimensions0, terrain_dimensions1, terrain_replacement_id, name0, name1\n"

		tmeta += "n=%d\n" % len(datfile.data["terrain"]["terrain"])

		i = 0
		blending_modes = set()
		for tk in datfile.data["terrain"]["terrain"]:
			if tk["slp_id"] < 0:
				continue

			blending_modes.add(tk["blend_mode"])

			wanted = ["terrain_id", "slp_id", "sound_id", "blend_mode", "blend_priority", "angle_count", "frame_count", "terrain_dimensions0", "terrain_dimensions1", "terrain_replacement_id", "name0", "name1"]

			line = [tk[w] for w in wanted]

			#as blending mode 0==1 and 7==8, and ice is 5 for sure,
			#we subtract one from the ids, and can map -1 to 0, as mode (0-1) == (1-1)
			#TODO: this can't be correct...
			line[3] -= 1
			if line[3] < 0:
				line[3] = 0

			line = map(str, line)
			tmeta += ("%d=" % i) + ",".join(line) + "\n"
			i += 1

		file_write(filename, tmeta)


		filename = file_get_path("processed/blending_meta.docx", write=True)

		bmeta = "#blending mode specification\n"
		bmeta += "#yeah, i know that this content is totally stupid, but that's how the data can be injected later\n"
		bmeta += "#idx=mode_id\n"

		bmeta += "n=%d\n" % len(blending_modes)

		i = 0
		for m in blending_modes:
			bmeta += "%d=%d\n" % (i, m)
			i += 1

		file_write(filename, bmeta)


		if args.extrafiles:
			file_write(file_get_path('info/colortable.pal.png', write=True), palette.gen_image())


	file_list = dict()
	files_extracted = 0

	for drsname, drsfile in drsfiles.items():
		for file_extension, file_id in drsfile.files:
			if not any((er.matches(drsname, file_id, file_extension) for er in extraction_rules)):
				continue

			if args.listfiles or args.dumpfilelist:
				fid = int(file_id)
				if fid not in file_list:
					file_list[fid] = list()

				file_list[fid] += [(drsfile.fname, file_extension)]
				continue

			if write_enabled:
				fbase = file_get_path('raw/' + drsfile.fname + '/' + str(file_id), write=True)
				fname = fbase + '.' + file_extension

				dbg("Extracting to " + fname + "...", 2)

				file_data = drsfile.get_file_data(file_extension, file_id)

			if file_extension == 'slp':

				if write_enabled:

					s = SLP(file_data)
					out_file_tmp = drsname + ": " + str(file_id) + "." + file_extension

					if merge_images:
						png, (width, height), metadata = s.draw_frames_merged(palette)
						file_write(fname + ".png", png)
						file_write(fname + '.docx', metadata)
						dbg(out_file_tmp + " -> saved packed atlas", 1)

					else:
						for idx, (png, metadata) in enumerate(s.draw_frames(palette)):
							filename = fname + '.' + str(idx)
							file_write(filename + '.png', png.image)
							file_write(filename + '.docx', metadata)

							dbg(out_file_tmp + " -> extracting frame %3d...\r" % (idx), 1, end="")
						dbg(out_file_tmp + " -> saved single frame(s)", 1)

			elif file_extension == 'wav':

				if write_enabled:

					file_write(fname, file_data)

					use_opus = True

					if use_opus:
						#opusenc invocation (TODO: ffmpeg?)
						opus_convert_call = ['opusenc', fname, fbase + '.opus']
						dbg("converting... : " + fname + " to opus.", 1)

						oc = subprocess.Popen(opus_convert_call, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
						oc_out, oc_err = oc.communicate()

						if ifdbg(2):
							oc_out = oc_out.decode("utf-8")
							oc_err = oc_err.decode("utf-8")

							dbg(oc_out + "\n" + oc_err, 2)

						#remove original wave file
						remove(fname)


			else:
				#this type is unknown or does not require conversion

				if write_enabled:
					file_write(fname, file_data)

			files_extracted += 1

	if write_enabled:
		dbg(str(files_extracted) + " files extracted", 0)

	if args.listfiles or args.dumpfilelist:
		#file_list = sorted(file_list)
		if not args.dumpfilelist:
			for idx, f in file_list.items():
				ret = "%d = [ " % idx
				for file_name, file_extension in f:
					ret += "%s/%d.%s, " % (file_name, idx, file_extension)
				ret += "]"
				print(ret)
		else:
			ret = "#!/usr/bin/python\n\n#auto generated age2tc file list\n\n"
			import pprint
			ret += "avail_files = "
			ret += pprint.pformat(file_list)
			print(ret)
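
For illustration, if the loop above had collected the blending modes 0, 4 and 7, the generated blending_meta.docx would contain something like:

#blending mode specification
#yeah, i know that this content is totally stupid, but that's how the data can be injected later
#idx=mode_id
n=3
0=0
1=4
2=7
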
Example 16
def spirals_display(Prg,
                    Spirals,
                    Width,
                    Height,
                    SleepTime=0,
                    Prefix="",
                    PauseAtEnd=0,
                    PauseAtStart=0,
                    SaveAsFilename=None):
    SaveAsTxt = list()

    CharBg = "🔸"  # small orange diamond
    CharsetColorful = [
        "😎",  # smiling face with sunglasses
        "🔘",  # radio button
        "🌼",
        "🍀",
        "🐙",
        "🎃",  # jack-o-lantern
        "🐸",  # frog face
        "🎅",  # father christmas
        "🐨",  # koala
        "🎁",  # wrapped present
        "🌷",  # tulip
        "🏀",  # basketball and hoop
        "😈",  # smiling face with horns
        "🕐",  # clock face, one o'clock
        "🔴",  # large red circle
        "🔵",  # large blue circle
        "🔆",  # high brightness symbol
        "💜",  # purple heart
        "🔅",  # low brightness symbol
        "🌑",  # new moon symbol
        "💡",  # electric light bulb
    ]

    Area = area.make_empty(Width, Height, CharBg)
    print(
        area.to_string(Area,
                       Prefix=Prefix,
                       AfterString="\n\n",
                       BeforeString="\n" * 33))
    time.sleep(PauseAtStart)

    for Coords in Spirals.values():
        CharColorful = CharsetColorful.pop(0)
        CharsetColorful.append(
            CharColorful)  # rotate the charset so each spiral gets a new char

        for X, Y in Coords:
            Area[X][Y] = CharColorful
            AreaTxt = area.to_string(Area,
                                     Prefix=Prefix,
                                     AfterString="\n\n",
                                     BeforeString="\n" * 33)
            SaveAsTxt.append(AreaTxt)
            print(AreaTxt)
            if SleepTime:
                time.sleep(SleepTime)

    if PauseAtEnd:
        time.sleep(PauseAtEnd)

    if SaveAsFilename:
        util.file_write(Prg, os.path.join(Prg["DirTmpPath"], SaveAsFilename),
                        "".join(SaveAsTxt))
Example 17
def get_data(level):
    url = data_url + '?level=' + str(level)
    raw_text = s.get(url).text
    raw_text = re.search('.*<br>', raw_text).group()[:-4]

    util.file_write(level, raw_text)
Example 18
    def test_util_file_write_append_del(self):
        if self._test_exec("test_file_write_append_del"):
            Prg = self.Prg

            Ret = util.file_write(Prg, Fname="")
            self.assertFalse(Ret)

            Content = "apple "
            Fname = os.path.join(Prg["DirWork"], "test_file_write.txt")
            RetWrite = util.file_write(Prg, Fname=Fname, Content=Content)
            RetAppend = util.file_append(Prg, Fname=Fname, Content="tree")
            self.assertTrue(RetWrite)
            self.assertTrue(RetAppend)
            RetRead, ContentRead = util.file_read_all(Prg, Fname)

            self.assertTrue(RetRead)
            self.assertEqual(ContentRead, "apple tree")

            FileState, FileGzipped = util.file_is_gzipped(Prg, Fname)
            self.assertEqual("file_exists", FileState)
            self.assertEqual("not_gzipped", FileGzipped)

            Sample = "รrvรญztลฑrล‘ tรผkรถrfรบrรณgรฉp"
            RetWriteGz = util.file_write(Prg,
                                         Fname=Fname,
                                         Content=Sample,
                                         Gzipped=True)
            self.assertTrue(RetWriteGz)
            RetReadGz, ContentReadGz = util.file_read_all(Prg,
                                                          Fname,
                                                          Gzipped=True)
            self.assertTrue(RetReadGz)
            self.assertEqual(ContentReadGz, Sample)

            FileState, FileGzipped = util.file_is_gzipped(Prg, Fname)
            self.assertEqual("file_exists", FileState)
            self.assertEqual("gzipped", FileGzipped)

            BinWanted = Sample.encode()
            util.file_write_utf8_error_avoid(Prg, Fname, Sample)
            BinFromFile = util.file_read_all_simple(Fname, "rb")
            # print("\n######### >>" + util.file_read_all(Prg, Fname)[1] + "<<")
            # print("\n######### >>", Sample.encode(), "<<")
            self.assertEqual(BinWanted, BinFromFile)

            FileWriteRet = util.file_write_with_check(Prg, Fname, Sample)
            TxtFromFile = util.file_read_all_simple(Fname)
            self.assertEqual(TxtFromFile, Sample)
            self.assertTrue(FileWriteRet)

            util.file_write_with_check(Prg, Fname,
                                       "")  # clear the content of the file

            # writing is unsuccessful because the writer function doesn't do anything
            def empty_writer_fun(Prg, Fname, Sample):
                pass

            FileWriteRet = util.file_write_with_check(
                Prg, Fname, Sample, WriterFun=empty_writer_fun)
            self.assertFalse(FileWriteRet)

            RetDel1 = util.file_del(Fname)
            RetDel2 = util.file_del(Fname)
            self.assertTrue(RetDel1)
            self.assertFalse(RetDel2)

            FileState, FileGzipped = util.file_is_gzipped(Prg, Fname)
            self.assertEqual("file_not_found", FileState)
            self.assertEqual("", FileGzipped)
Example 19
def get_data(level):
    url = data_url + '?level=' + str(level)
    raw_text = s.get(url).text
    raw_text = re.search('.*<br>', raw_text).group()[ : -4]

    util.file_write(level, raw_text)