Example #1
0
def disconnectDatabase():
    """Stop the 4s-backend process serving DBNAME.

    Sends pkill a regex matching the exact backend command line
    ("4s-backend <DBNAME>") so only this database's backend is killed.
    pkill exits 0 when a process was matched, non-zero otherwise.
    """
    disconnectStr = '^4s-backend ' + DBNAME + '$'
    output = subprocess.call(["pkill", "-f", disconnectStr])
    if output == 0:
        # Fixed: missing space before DBNAME in the original message.
        info("disconnected from " + DBNAME + "\n")
    else:
        # Fixed: "disconnected to" -> "disconnected from".
        info("failure: already disconnected from " + DBNAME + "\n")
Example #2
0
def connectDatabase():
    """Start the 4s-backend process for DBNAME.

    Returns True when 4s-backend exited with status 0 (started),
    False otherwise (e.g. a backend for DBNAME is already running).
    """
    output = subprocess.call(["4s-backend", DBNAME])
    if output == 0:
        # Fixed: missing space before DBNAME in the original message.
        info("connected to " + DBNAME + "\n")
        return True
    else:
        info("failure: already connected to " + DBNAME + "\n")
        return False
Example #3
0
def connectDatabaseHttp():
    """Start the 4s-httpd SPARQL HTTP server for DBNAME on PORT.

    Returns True when 4s-httpd exited with status 0 (started),
    False otherwise (e.g. the server is already running).
    """
    # Use an argument list instead of a shell=True command string: this
    # avoids shell quoting/injection problems if DBNAME ever contains
    # metacharacters. The original's "> /dev/null 2>&1" redirection is
    # replaced by passing devnull for stdout/stderr.
    devnull = open(os.devnull, "w")
    try:
        output = subprocess.call(
            ["4s-httpd", "-X", "-p", str(PORT), DBNAME],
            stdout=devnull, stderr=devnull)
    finally:
        devnull.close()
    if output == 0:
        # Fixed: missing space before DBNAME in the original message.
        info("connected to " + DBNAME + "\n")
        return True
    else:
        info("failure: already connected to " + DBNAME + "\n")
        return False
Example #4
0
def importDatasetsFileHttp(file):
    """Upload a dataset file into the running 4s-httpd server via curl -T.

    file -- path to the dataset; its basename becomes the graph name in
            the http://localhost:PORT/data/<basename> URL.
    """
    filename = os.path.split(file)[1]
    # No single quotes around the URL any more: they were shell quoting,
    # which is unnecessary (and wrong) once we pass an argument list.
    hostStr = "http://localhost:" + str(PORT) + "/data/" + filename
    # Argument-list call (no shell=True): immune to spaces and shell
    # metacharacters in the file path. Output is discarded as before.
    devnull = open(os.devnull, "w")
    try:
        output = subprocess.call(["curl", "-T", str(file), hostStr],
                                 stdout=devnull, stderr=devnull)
    finally:
        devnull.close()
    if output == 0:
        info("data is added\n")
    else:
        info("failure: not able to add data: " + str(output) + "\n")
Example #5
0
    def handleResponse(self, data):
        """Deliver a proxied response to the original client request.

        Special case: a Twitter API call containing "screen_name=" is
        intercepted — the JSON is parsed/imported as RDF, and the response
        body is replaced with the user's HTML profile page fetched live
        from twitter.com. All other responses pass through unchanged.
        """
        if self.originalRequest.host.host == "api.twitter.com" and \
                self.originalRequest.uri.find("screen_name=") > 0:
            # Project-local parser: converts the Twitter JSON at self.uri
            # into (data, rdf_) — NOTE(review): exact contract of
            # main.main is not visible here; confirm against parserx.
            from parserx.json import main
            data, rdf_ = main.main(self.uri)
            rdf.importDatasets(rdf_)

            # Extract the screen_name value from the request URI.
            uri_ = str(self.originalRequest.uri)
            index1 = uri_.find("screen_name=") + len("screen_name=")
            index2 = uri_.find("&", index1)
            # NOTE(review): if no "&" follows, index2 is -1 and the slice
            # drops the last character of the screen name — confirm that
            # callers always append further query parameters.
            url_ = "https://twitter.com/%s" % uri_[index1:index2]

            from logging_wrapper import info
            import urllib2

            # Synchronously fetch the profile page (blocks the reactor
            # thread for the duration of the HTTP request).
            info("Fetch twitter url: %s" % url_)
            response = urllib2.urlopen(url_)
            page = response.read()

            info("Got twitter response")

            if page:
                # Substitute the HTML page for the API's JSON body and
                # relabel the response accordingly; clearing
                # content-encoding because the replacement is not gzipped.
                data = page
                self.originalRequest.responseHeaders.setRawHeaders(
                    "content-type", ["text/html"])
                self.originalRequest.responseHeaders.setRawHeaders(
                    "content-encoding", [])
            else:
                #self.originalRequest.responseHeaders.setRawHeaders(
                #    "content-type", ["application/json"])
                self.originalRequest.responseHeaders.setRawHeaders(
                    "content-encoding", [])

        # Let the request object post-process (e.g. filter/re-encode) the
        # body, then fix up Content-Length to match the possibly-modified
        # payload before writing it back to the client.
        data = self.originalRequest.processResponse(data)
        if self.contentLength != None:
            self.originalRequest.setHeader('Content-Length', len(data))

        self.originalRequest.write(data)

        # Finish the client request and drop the upstream connection.
        self.originalRequest.finish()
        self.transport.loseConnection()
Example #6
0
    def processResponse(self, data):
        """Run the response body through proxy_filter, transparently
        decompressing and recompressing gzip-encoded HTML/XML payloads.

        Returns the (possibly rewritten) body; non-gzip or non-HTML/XML
        responses are passed to the filter as-is.
        """
        #modified begin
        import proxy_filter
        from logging_wrapper import info

        content_encoding = self.responseHeaders.getRawHeaders(
            "content-encoding")
        content_types = self.responseHeaders.getRawHeaders("content-type")
        info("Got: %s, %s" % (self.host.host, self.uri))
        #print content_encoding, content_types
        # Only treat the body as gzipped when there is a body, a gzip
        # content-encoding header, AND a text/html or text/xml content
        # type — other types are left untouched.
        gzipped = data and content_encoding\
            and any([x.find("gzip") >= 0 for x in content_encoding])\
            and content_types and (
            any([x.find("text/html") >= 0 for x in content_types]) or
            any([x.find("text/xml") >= 0 for x in content_types])
            )

        if gzipped:
            # Decompress in memory so the filter sees plain text.
            import gzip
            from StringIO import StringIO
            info("Decompress response")
            buf = StringIO(data)
            s = gzip.GzipFile(mode="rb", fileobj=buf)
            data = s.read()

        # Project-local content rewriter; NOTE(review): contract of
        # proxy_filter.filter is not visible here — it appears to take
        # (request, body) and return the filtered body.
        data = proxy_filter.filter(self, data)
        if gzipped:
            # Recompress so the body matches the still-advertised gzip
            # content-encoding header (which is deliberately NOT removed).
            #self.responseHeaders.removeHeader("content-encoding")
            import gzip
            from StringIO import StringIO
            buf = StringIO()
            s = gzip.GzipFile(mode="wb", fileobj=buf, compresslevel=2)
            s.write(data)
            s.close()
            data = buf.getvalue()

        # #modified end
        return data
Example #7
0
def createDatabase():
    """Create the 4store backend database DBNAME via 4s-backend-setup."""
    status = subprocess.call(["4s-backend-setup", DBNAME])
    if status != 0:
        info("failure: " + DBNAME + " not created\n")
        return
    info(DBNAME + " is created\n")
Example #8
0
def importDatasetsFile(file):
    """Load a dataset file into DBNAME using the 4s-import tool."""
    exit_code = subprocess.call(["4s-import", DBNAME, file])
    message = "data is added\n" if exit_code == 0 \
        else "failure: not able to add data\n"
    info(message)
Example #9
0
def disconnectDatabaseHttp():
    """Stop any running 4s-httpd SPARQL HTTP server via pkill.

    pkill exits 0 when a 4s-httpd process was matched and signalled,
    non-zero when none was found.
    """
    output = subprocess.call(["pkill", "4s-httpd"])
    if output == 0:
        # Fixed: missing space before DBNAME in the original message.
        info("disconnected from " + DBNAME + "\n")
    else:
        # Fixed: "disconnected to" -> "disconnected from".
        info("failure: already disconnected from " + DBNAME + "\n")
Example #10
0
def deleteDatabase():
    """Destroy the 4store backend database DBNAME via 4s-backend-destroy."""
    rc = subprocess.call(["4s-backend-destroy", DBNAME])
    if rc != 0:
        info("failure: " + DBNAME + " not deleted\n")
        return
    info(DBNAME + " is deleted\n")