Example #1
def getJudicature():
    params = loadParams()
    id = params.get("id")
    data = JudicatureDao.quertJudicature(id)
    json = stringify(data).encode("utf8")
    # WebSocket.sendMessage("{'action': 'getJudicature', 'data': " + json + ", 'returnCode': '0', 'returnMsg': 'SUCCESS'}")
    return parseResponse(0, {"data": data})
Example #2
def queryTaskStatusByTaskJobId():
    params = loadParams()
    taskJobId = params["taskJobId"]
    data = BaseDao.query(TaskStatusInfoField, text('task_job_id="'+taskJobId+'"'))
    json = stringify(data).encode("utf8")
    # WebSocket.sendMessage("{'action':'queryTaskStatusByTaskJobId','data':" + json + ",'returnCode':'0','returnMsg':'SUCCESS'}")
    return parseResponse(0, {"data": data})
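
Note: the WHERE clause above is built by concatenating taskJobId straight into the SQL text, which breaks on embedded quotes and is open to injection. A minimal sketch of the same filter with a bound parameter, assuming a plain SQLAlchemy Session and the TaskStatusInfoField model from these examples (BaseDao.query is a project-specific helper not shown here):

from sqlalchemy import text

def queryTaskStatusSafe(session, taskJobId):
    # Bind task_job_id instead of splicing it into the SQL fragment,
    # so quoting and escaping are handled by the driver.
    stmt = text("task_job_id = :task_job_id").bindparams(task_job_id=taskJobId)
    return session.query(TaskStatusInfoField).filter(stmt).all()
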
Example #3
def getTortTotal():
    params = loadParams()
    id = params.get("id")
    data = TortTotalDao.queryTortTotal(66)
    json = stringify(data).encode("utf8")
    print json
    # WebSocket.sendMessage("{'action': 'getTortTotal', 'data': " + json + ", 'returnCode': '0', 'returnMsg': 'SUCCESS'}")
    return parseResponse(0, {'data': data})
Example #4
def parseResponse(errorCode,dict=None,flag=False):
    result=stringify(parseResponseJson(errorCode,dict))
    if flag:
        return result
    rsp = make_response(result)
    allowCrossOrigin=eval(ConfigUtils.getWebPorperties("ALLOW_CROSS_ORIGIN"))
    if allowCrossOrigin:
        rsp.headers['Access-Control-Allow-Origin'] = '*'
    rsp.mimetype = 'application/json;charset=utf-8'
    return rsp
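
parseResponseJson() itself does not appear in these examples. Judging from the commented-out WebSocket payloads elsewhere ('returnCode', 'returnMsg', plus the data dict), it presumably builds an envelope roughly like the hypothetical sketch below; this is an inference, not the project's actual implementation:

def parseResponseJson(errorCode, dict=None):
    # Hypothetical envelope inferred from the WebSocket payloads in the
    # other examples; the non-success returnMsg text is a guess.
    body = {"returnCode": str(errorCode),
            "returnMsg": "SUCCESS" if errorCode == 0 else "FAIL"}
    if dict:
        body.update(dict)
    return body

With flag=True, parseResponse() returns the serialized envelope as a plain string; otherwise it wraps it in a Flask response and, when ALLOW_CROSS_ORIGIN is truthy, adds the Access-Control-Allow-Origin header.
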
Example #5
def getDownLoadCertUrl():
    params = loadParams()
    phone = params.get("phone")
    email = params.get("email")
    out_id = params.get("out_id")
    file_id = params.get("file_id")
    downLoadUrl = depositcert.get_download_cert_url(file_id, phone, email,
                                                    out_id)
    json = stringify(downLoadUrl).encode("utf8")
    WebSocket.sendMessage("{'action': 'getDownLoadCertUrl', 'data': " + json +
                          ", 'returnCode': '0', 'returnMsg': 'SUCCESS'}")
    return parseResponse(0, {"downLoadUrl": downLoadUrl})
Example #6
def saveCert():
    params = loadParams()
    content = params.get("content")
    phone = params.get("phone")
    name = params.get("name")
    id = params.get("id")
    result = depositcert.deposit_cert(content, name, phone)
    json = {}
    try:
        json = stringify(result).encode("utf8")
    except Exception, e:
        logging.error(str(e))
Example #7
def queryTortDate():
    # params = loadParams()
    # periodType = params["periodType"]
    data = BaseDao.query(TortReInfoField)
    # fabricate dynamic fake data (begin)
    if data:
        for d in data:
            if isinstance(d, TortReInfoField):
                value = RandomUtil.random(0, 50)
                d.totalCnt += value
    # fabricate dynamic fake data (end)
    json = stringify(data).encode("utf8")
    # WebSocket.sendMessage( "{'action':'queryTortDate','data':" + json + ",'returnCode':'0','returnMsg':'SUCCESS'}")
    return parseResponse(0, {"data": data})
Example #8
def queryCerts():
    params = loadParams()
    phone = params.get("phone")
    email = params.get("email")
    out_id = params.get("out_id")
    result = depositcert.query_certs(phone, email, out_id)
    if result['code'] == 0:
        files = result['files']
        for file in files:
            long_time = file['save_time']
            timeArray = time.localtime(long_time / float(1000))
            otherStyleTime = time.strftime("%Y-%m-%d %H:%M:%S", timeArray)
            file['save_time'] = otherStyleTime
        json = stringify(result['files']).encode("utf8")
        WebSocket.sendMessage("{'action': 'queryCerts', 'data': " + json +
                              ", 'returnCode': '0', 'returnMsg': 'SUCCESS'}")
        return parseResponse(0, {"result": result['files']})
    else:
        return parseResponse(-1)
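
The save_time values come back as millisecond epochs, while time.localtime() expects seconds, hence the division by 1000. The conversion on its own:

import time

def formatSaveTime(ms_epoch):
    # Millisecond epoch -> "YYYY-MM-DD HH:MM:SS" in local time.
    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ms_epoch / float(1000)))
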
Example #9
def queryGisDetailByDetailKv():
    params = loadParams()
    taskJobId = params["taskJobId"]
    # data = BaseDao.query(GisInfoDetailField, text('task_job_id="'+taskJobId+'"'))
    data = queryGisDetailBigOne(taskJobId)
    json = stringify(data).encode("utf8")
    # WebSocket.sendMessage("{'action':'queryGisDetailBigByDetailKv','data':" + json + ",'returnCode':'0','returnMsg':'SUCCESS'}")
    return parseResponse(0, {"data": data})


# @app.route("/ceshi", methods=['POST', 'GET'])
# def ceshi():
#     params = loadParams()
#     taskJobId = params["taskJobId"]
#     data = queryGisDetailOne(taskJobId);
#     json = stringify(data).encode("utf8")
#     WebSocket.sendMessage("{'action':'queryGisDetailByTaskJobId','data':'" + json + "','returnCode':'0','returnMsg':'SUCCESS'}")
#     return parseResponse(0, {"data": data})
# if __name__ == '__main__':
#     app.run()
Example #10
def queryGisDetailByTaskJobId():
    params = loadParams()
    taskJobId = params["taskJobId"]
    if taskJobId:
        data = queryGisDetailOne(taskJobId)
    else:
        data = queryGisDetailAll()
    # fabricate dynamic fake data (begin)
    if data:
        for result in data:
            d = result[0]
            if isinstance(d, GisInfoField):
                if d.detailCnt:
                    d.detailCnt += RandomUtil.random(0, 50)
                    d.provinceCnt += RandomUtil.random(0, 50)
                    d.overseasCnt += RandomUtil.random(0, 50)
    # fabricate dynamic fake data (end)
    json = stringify(data).encode("utf8")
    # WebSocket.sendMessage("{'action':'queryGisDetailByTaskJobId','data':" + json + ",'returnCode':'0','returnMsg':'SUCCESS'}")
    return parseResponse(0, {"data": data})
Example #11
def getStaticAssetsList():
    query = urlparse.urlparse(request.url).query
    param_dict = dict([(k, v[0]) for k, v in urlparse.parse_qs(query).items()])
    # user_id = param_dict.get("userid")
    # if not user_id:
    #     # validate the user (admin)
    #     result =  "error user_id"
    # else :
    assetList = pc.getStaticAssetsList()
    responseMsg = {}
    responseMsg["returnCode"] = '0'
    responseMsg["Msg"] = 'success'
    if assetList:
        for d in assetList:
            responseMsg[d] = assetList[d]
    result = stringify(responseMsg)
    rsp = make_response(result)
    rsp.mimetype = 'application/json;charset=utf-8'
    return rsp
Example #12
def queryGeneralDateListByPeriodType():
    params = loadParams()
    periodType = params["periodType"]
    data = Session.query(TotalReInfoField).filter(
        text('period_type=' + periodType + '')).group_by("content_type").all()
    # fabricate dynamic fake data (begin)
    totalCount = 0
    newData = []
    currentzerotime = currentZeroTime()  # timestamp of today's midnight
    currenttick = time.time()  # current timestamp
    currentPercent = (currenttick - currentzerotime) / (
        24 * 60 * 60)  # fraction of the whole day elapsed so far
    bigCount = math.floor(maximumData / (24 * 60 * 60))
    smallCount = math.floor(minimunData / (24 * 60 * 60))
    random = smallCount + (
        bigCount - smallCount
    ) * currentPercent  # value that grows through the day, between smallCount and bigCount
    currentData = int(random *
                      (currenttick - currentzerotime))  # derive the current running total from that rate
    if data:
        for d in data:
            if isinstance(d, TotalReInfoField):
                if d.contentType != 0:
                    value = int(
                        addDataByContentType(d.contentType, currentData))
                    d.tortCnt = value
                    if d.contentType != 5:
                        totalCount += d.tortCnt
                    newData.append(d)
                else:
                    totalData = d
        totalData.tortCnt = totalCount
        newData.append(totalData)
    # fabricate dynamic fake data (end)
    json = stringify(newData).encode("utf8")
    # WebSocket.sendMessage("{'action':'queryGeneralDateList','data':"+json+",'returnCode':'0','returnMsg':'SUCCESS'}")
    return parseResponse(0, {"data": newData})
Example #13
def checkFinishJob():
    keys=RedisUtils.hkeys(ConfigUtils.getRedisPorperties(KEYMAP.FINISH_SPIDER_REDIS_KEY))
    for key in keys :
        temp=RedisUtils.hget(ConfigUtils.getRedisPorperties(KEYMAP.FINISH_SPIDER_REDIS_KEY), key)
        newJobTemplate=json.loads(temp)
        url=newJobTemplate['url']
        try:
            request = urllib2.Request(
                url=url,
                headers=(random.choice(user_agent_list))
            )
            response = urllib2.urlopen(request)
            urldate = response.headers['date']
            tempDate= newJobTemplate['urldate']
            print urldate
            print tempDate
            if urldate == tempDate:
                pass
            else:
                newJobTemplate['urldate']=urldate

                taskJobHistoryId = newJobTemplate['taskJobHistoryId']
                taskJobHistory=Session.query(TaskJobHistory).filter(TaskJobHistory.id==taskJobHistoryId,TaskJobHistory.delFlag==False).order_by(" create_time desc").first()
                taskJob=Session.query(TaskJob).filter(TaskJob.id==taskJobHistory.taskJobId).first()
                LoggerDao.addTaskJobLogger(taskJob,LoggerDao.LoggerType.URL_TO_REDIS,
                                           jobTemplateId=newJobTemplate['id'],taskJobHistoryId=taskJobHistoryId,
                                           content=u"redis_入库",url=url,status=TaskStatus.RUNNING)

                RedisUtils.lpush(ConfigUtils.getRedisPorperties(KEYMAP.ASSIST_SPIDER_REDIS_KEY), taskJobHistoryId)
                RedisUtils.lpush(ConfigUtils.getRedisPorperties(KEYMAP.ASSIST_SPIDER_REDIS_KEY) + "_" + taskJobHistoryId,stringify(newJobTemplate))
                RedisUtils.hset(ConfigUtils.getRedisPorperties(KEYMAP.FINISH_SPIDER_REDIS_KEY), newJobTemplate['id'],stringify(newJobTemplate))
        except Exception, e:
            print e
Example #14
def parseUrlAndInsertRedis(taskJob,
                           paramMap={},
                           taskJobParam=None,
                           taskJobHistory=None,
                           jobTemplate=None):
    if TaskType.DEPTH == str(taskJob.type):
        if bloomfilter_check(taskJob.id, taskJob.url):
            RedisUtils.lpush(
                ConfigUtils.getRedisPorperties(KEYMAP.DEPTH_SPIDER_REDIS_KEY),
                taskJobHistory.id)
            RedisUtils.lpush(
                ConfigUtils.getRedisPorperties(KEYMAP.DEPTH_SPIDER_REDIS_KEY) +
                "_" + taskJobHistory.id, stringify(taskJob))
    else:
        url = taskJob.url
        taskJobParamList = TaskJobDao.queryTaskJobParam(taskJob.id)
        if taskJobParam != None:
            if isinstance(taskJobParam, list):
                taskJobParamList.extend(taskJobParam)
            else:
                taskJobParamList.append(taskJobParam)
        jobTemplateParamList = TemplateDao.queryJobTemplateParamByJobTemplateId(
            jobTemplate.id)
        if jobTemplateParamList != None and len(jobTemplateParamList) > 0:
            taskJobParamList.extend(jobTemplateParamList)
        if taskJobHistory != None:
            jobTemplateParamTaskJob = JobTemplateParam(
                paramNameEn="task_job_id_sequence",
                paramValue=str(taskJobHistory.id))
            jobTemplateParamList.append(jobTemplateParamTaskJob)
        if taskJobParamList == None or len(taskJobParamList) <= 0:
            if str(taskJob.type) == TaskType.BATCH:
                url = jobTemplate.url
            renderUrl = RenderUtils.render(url, paramMap)

            # if bloomfilter_check(taskJob.id, renderUrl):
            newJobTemplate = ClassCopy.copyToNewInstances(
                jobTemplate, JobTemplate)
            taskJobHistoryId = taskJobHistory.id
            urlListStatus = UrlClazz(url=jobTemplate.url,
                                     parentUrl=paramMap.get("task_job_url"),
                                     jobTemplateId=jobTemplate.id,
                                     jobTemplateParentId=jobTemplate.parentId,
                                     taskJobId=taskJob.id,
                                     taskJobHistoryId=taskJobHistoryId)
            # try:
            #     request = urllib2.Request(
            #         url=url,
            #         headers={
            #             'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'}
            #     )
            #     response = urllib2.urlopen(request)
            #     urldate = response.headers['date']
            # except Exception:
            #     pass
            #     print Exception
            setattr(newJobTemplate, "taskJobId", taskJob.id)
            setattr(newJobTemplate, "taskJobHistoryId", taskJobHistoryId)
            setattr(newJobTemplate, "url", renderUrl)
            setattr(newJobTemplate, "extraData", paramMap)
            # setattr(newJobTemplate, "urldate", urldate)
            setattr(newJobTemplate, "urlListStatusId", urlListStatus.id)
            LoggerDao.addTaskJobLogger(taskJob,
                                       LoggerDao.LoggerType.URL_TO_REDIS,
                                       jobTemplateId=newJobTemplate.id,
                                       taskJobHistoryId=taskJobHistoryId,
                                       content=u"redis_入库",
                                       url=renderUrl,
                                       status=TaskStatus.RUNNING)
            # if (hashswitch):
            #     tempList.append(stringify(newJobTemplate))
            # else:
            # mainId.append(stringify(newJobTemplate))
            RedisUtils.lpush(
                ConfigUtils.getRedisPorperties(KEYMAP.ASSIST_SPIDER_REDIS_KEY),
                taskJobHistoryId)
            RedisUtils.lpush(
                ConfigUtils.getRedisPorperties(KEYMAP.ASSIST_SPIDER_REDIS_KEY)
                + "_" + taskJobHistoryId, stringify(newJobTemplate))
            RedisUtils.hset(
                ConfigUtils.getRedisPorperties(KEYMAP.FINISH_SPIDER_REDIS_KEY),
                newJobTemplate.id, stringify(newJobTemplate))
            saveUrlListStatus(urlListStatus)
        else:
            for data in paraseJobTemplateList(taskJobParamList, paramMap):
                if str(taskJob.type) == TaskType.BATCH:
                    url = jobTemplate.url
                parentId = paramMap.get("dataParentId")
                paramMap = dict(paramMap.items() + data.items())
                renderUrl = RenderUtils.render(url, paramMap)
                # if bloomfilter_check(taskJob.id, renderUrl):
                newJobTemplate = ClassCopy.copyToNewInstances(
                    jobTemplate, JobTemplate)
                taskJobHistoryId = taskJobHistory.id
                urlListStatus = UrlClazz(
                    url=renderUrl,
                    parentUrl=paramMap.get("task_job_url"),
                    jobTemplateId=jobTemplate.id,
                    jobTemplateParentId=jobTemplate.parentId,
                    taskJobId=taskJob.id,
                    taskJobHistoryId=taskJobHistoryId)
                # try:
                #     request = urllib2.Request(
                #         url=url,
                #         headers={
                #             'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'
                #         }
                #     )
                #     response = urllib2.urlopen(request)
                #     urldate = response.headers['date']
                # except Exception:
                #     pass
                #     print Exception
                setattr(newJobTemplate, "taskJobId", taskJob.id)
                setattr(newJobTemplate, "taskJobHistoryId", taskJobHistoryId)
                setattr(newJobTemplate, "url", renderUrl)
                setattr(newJobTemplate, "dataParentId", parentId)
                setattr(newJobTemplate, "extraData", paramMap)
                # setattr(newJobTemplate, "urldate", urldate)
                setattr(newJobTemplate, "urlListStatusId", urlListStatus.id)
                LoggerDao.addTaskJobLogger(taskJob,
                                           LoggerDao.LoggerType.URL_TO_REDIS,
                                           jobTemplateId=newJobTemplate.id,
                                           taskJobHistoryId=taskJobHistoryId,
                                           content=u"redis_入库_多参数",
                                           url=renderUrl,
                                           status=TaskStatus.RUNNING)
                # if (hashswitch):
                #     tempList.append(newJobTemplate)
                # else:
                RedisUtils.lpush(
                    ConfigUtils.getRedisPorperties(
                        KEYMAP.ASSIST_SPIDER_REDIS_KEY), taskJobHistoryId)
                RedisUtils.lpush(
                    ConfigUtils.getRedisPorperties(
                        KEYMAP.ASSIST_SPIDER_REDIS_KEY) + "_" +
                    taskJobHistoryId, stringify(newJobTemplate))
                # mainId.append(stringify(newJobTemplate))
                RedisUtils.hset(
                    ConfigUtils.getRedisPorperties(
                        KEYMAP.FINISH_SPIDER_REDIS_KEY), newJobTemplate.id,
                    stringify(newJobTemplate))
                saveUrlListStatus(urlListStatus)
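
Both checkFinishJob() and parseUrlAndInsertRedis() push work into Redis with the same three-step convention: the task-history id goes onto a shared list, the serialized job template onto a per-history list keyed by that id, and a "finished" hash keeps the latest serialized template per template id. A sketch of that convention against plain redis-py (RedisUtils and ConfigUtils are project wrappers not shown here):

import json
import redis

r = redis.Redis()

def enqueueJobTemplate(assist_key, finish_key, taskJobHistoryId, template):
    payload = json.dumps(template)
    r.lpush(assist_key, taskJobHistoryId)                  # announce the history id
    r.lpush(assist_key + "_" + taskJobHistoryId, payload)  # per-history work queue
    r.hset(finish_key, template["id"], payload)            # latest state per template id
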
Example #15
            taskJobReField.taskJobId = taskJobId
            taskJobReField.delFlag = False
            taskJobReField.jobTemplateFieldId = field.id
            taskJobReField.createTime=datetime.now()
            Session.add(taskJobReField)
        return
    fieldList=fieldIds.split(",")
    for fieldId in fieldList:
        taskJobReField=TaskJobReField(id=uuid.uuid1())
        taskJobReField.jobTemplateId=jobTemplateId
        taskJobReField.taskJobId=taskJobId
        taskJobReField.delFlag=False
        taskJobReField.jobTemplateFieldId=fieldId
        taskJobReField.createTime=datetime.now()
        Session.add(taskJobReField)
    # TaskJobDao.updateTaskJob(TaskJob.id==taskJobId,{TaskJob.jobTemplateId:jobTemplateId})
    Session.query(TaskJob).filter(TaskJob.id==taskJobId).update({TaskJob.jobTemplateId:jobTemplateId})
    Session.flush()
    Session.commit()
def parseProxyFile():
    ipList=RequestUtils.parseProxyFile()
    return ProxyDao.addStaticProxyList(ipList)


def search(taskJobParam):
    Session.add(taskJobParam)
    return None

if __name__=="__main__":
    print stringify(taskJobReTemplateDetail("052d698f-3ee3-11e7-a155-e09467f6dff0"))