Example #1
def insert_batch(num, clean=True, average_iteration_num=1):
    sum_time = 0.0
    for i in range(0, average_iteration_num):
        starttime = datetime.datetime.now()

        old_session = EngineFactory.create_session_to_so_old(echo=False)
        new_session = EngineFactory.create_session_to_new_so(echo=False)

        old_post_list = old_session.query(PostsRecord).limit(num)
        for post in old_post_list:
            new_session.add(post.make_copy())

        ## Buffer all rows in the session first, then write them to the database with a single commit

        new_session.commit()

        endtime = datetime.datetime.now()
        time = (endtime - starttime).total_seconds()
        print("test_insert_batch num={num} time={time}".format(num=num,
                                                               time=time))
        if clean:
            PostsRecord.delete_all(new_session)
        sum_time = sum_time + time

    return {
        "type": "insert batch",
        "num": num,
        "time": sum_time / average_iteration_num
    }
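The batch pattern above stages every copy in the session and pays the commit cost only once. A minimal alternative sketch, assuming the EngineFactory session helpers and the PostsRecord model used throughout these examples are importable; it stages the copies in one call with SQLAlchemy's Session.bulk_save_objects(). The helper name insert_batch_bulk is hypothetical, not part of the original benchmarks.

def insert_batch_bulk(num):
    # Hypothetical variant of the batch insert: copy `num` posts with one
    # bulk operation and a single commit.
    old_session = EngineFactory.create_session_to_databackup_so(echo=False)
    new_session = EngineFactory.create_session_to_test_so(echo=False)

    copies = [post.make_copy() for post in old_session.query(PostsRecord).limit(num)]
    new_session.bulk_save_objects(copies)  # stages all rows without per-object identity tracking
    new_session.commit()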
Example #2
def insert_separate(num, clean=True, average_iteration_num=1):
    sum_time = 0.0
    for i in range(0, average_iteration_num):

        old_session = EngineFactory.create_session_to_databackup_so(echo=False)
        test_session = EngineFactory.create_session_to_test_so(echo=False)

        old_post_list = old_session.query(PostsRecord).limit(num)

        starttime = datetime.datetime.now()

        for post in old_post_list:
            test_session.add(post.make_copy())
            ## Commit to the database after every single insert
            test_session.commit()

        endtime = datetime.datetime.now()
        time = (endtime - starttime).total_seconds()
        print("test_insert_separate num={num} time={time}".format(num=num,
                                                                  time=time))
        if clean:
            PostsRecord.delete_all(test_session)
        sum_time = sum_time + time
    return {
        "type": "insert separate",
        "num": num,
        "time": sum_time / average_iteration_num
    }
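A small driver sketch that contrasts the two strategies, assuming both benchmark functions above are importable in the same module; it runs each for a few sizes and prints the averaged timings they return.

if __name__ == "__main__":
    results = []
    for n in (100, 1000, 10000):
        # each call returns {"type": ..., "num": ..., "time": ...}
        results.append(insert_batch(n, clean=True, average_iteration_num=3))
        results.append(insert_separate(n, clean=True, average_iteration_num=3))
    for r in results:
        print("{type}: num={num} avg_time={time:.3f}s".format(**r))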
Example #3
def insert_batch(num, clean=True, average_iteration_num=1):
    sum_time = 0.0
    for i in range(0, average_iteration_num):

        ## The next three lines fetch the desired number of rows from the old full table as the source data for the inserts; they could also be read from a file, but that would be more trouble
        old_session = EngineFactory.create_session_to_databackup_so(echo=False)
        test_session = EngineFactory.create_session_to_test_so(echo=False)
        old_post_list = old_session.query(PostsRecord).limit(num)

        starttime = datetime.datetime.now()

        for post in old_post_list:
            test_session.add(post.make_copy())

        ## Buffer all rows in the session first, then write them to the database with a single commit

        test_session.commit()
        endtime = datetime.datetime.now()

        time = (endtime - starttime).total_seconds()
        print("test_insert_batch num={num} time={time}".format(num=num,
                                                               time=time))
        if clean:
            PostsRecord.delete_all(test_session)
        sum_time = sum_time + time

    return {
        "type": "insert batch",
        "num": num,
        "time": sum_time / average_iteration_num
    }
Example #4
def search_one_table_mul_filter(num, average_iteration_num=1):
    sum_time = 0.0
    for i in range(0, average_iteration_num):
        starttime = datetime.datetime.now()

        session = EngineFactory.create_session_to_test_so(echo=False)

        res = session.query(PostsRecord).filter(
            PostsRecord.owner_user_id < num,
            PostsRecord.view_count > 1000).all()
        if len(res) > 0:
            print("search_one_table_mul_filter_result:", len(res), ":", res[0])
        else:
            print("search_one_table_mul_filter_result: null")

        endtime = datetime.datetime.now()
        time = (endtime - starttime).total_seconds()
        print("test_search_one_table_mul_filter num={num} time={time}".format(
            num=num, time=time))
        sum_time = sum_time + time
    return {
        "type": "search_one_table_mul_filter",
        "num": num,
        "time": sum_time / average_iteration_num
    }
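The two conditions passed to .filter() above are combined with an implicit AND. For reference, a sketch of the equivalent query written with an explicit sqlalchemy.and_() or with chained .filter() calls, assuming the same session and model:

from sqlalchemy import and_

res = session.query(PostsRecord).filter(
    and_(PostsRecord.owner_user_id < num,
         PostsRecord.view_count > 1000)).all()

# ...or equivalently, chaining the filters:
res = (session.query(PostsRecord)
       .filter(PostsRecord.owner_user_id < num)
       .filter(PostsRecord.view_count > 1000)
       .all())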
Example #5
def search_aggregate(num, average_iteration_num=1):
    sum_time = 0.0
    for i in range(0, average_iteration_num):
        starttime = datetime.datetime.now()

        session = EngineFactory.create_session_to_test_so(echo=False)

        res = session.query(
            func.sum(PostsRecord.favorite_count), UsersRecord.display_name,
            UsersRecord.reputation).filter(
                UsersRecord.id < num,
                PostsRecord.owner_user_id == UsersRecord.id).all()
        if len(res) > 0:
            print("search_aggregate_result:", len(res), ":", res[0])
        else:
            print("search_aggregate_result: null")

        endtime = datetime.datetime.now()
        time = (endtime - starttime).total_seconds()
        print("test_search_aggregate num={num} time={time}".format(num=num,
                                                                   time=time))
        sum_time = sum_time + time
    return {
        "type": "search_aggregate",
        "num": num,
        "time": sum_time / average_iteration_num
    }
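Note that the query above joins the two tables through a filter condition and aggregates without a GROUP BY, so it returns a single row whose display_name and reputation come from an arbitrary matching user (MySQL accepts this). A sketch of a per-user aggregate, assuming the same models, using an explicit join and group_by:

res = (session.query(func.sum(PostsRecord.favorite_count),
                     UsersRecord.display_name,
                     UsersRecord.reputation)
       .join(UsersRecord, PostsRecord.owner_user_id == UsersRecord.id)
       .filter(UsersRecord.id < num)
       .group_by(UsersRecord.id)
       .all())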
Example #6
def get_game_data(doc_name, game_id: int):
    vote_table_id = (game_id - 1) * 2
    action_table_id = (game_id - 1) * 2 + 1

    vote_df = pd.DataFrame(read_table(doc_name, vote_table_id))
    action_df = pd.DataFrame(read_table(doc_name, action_table_id))

    factory = EngineFactory()
    name = get_game_name_by_id(doc_name, game_id)
    winner = get_game_winner_by_id(doc_name, game_id)
    cleaned_data = {"template": name, "winner": winner}
    parser = factory.construct(name)
    parser.read_data(action_df, vote_df)
    cleaned_data["data"] = parser.parse(winner)

    return cleaned_data
Example #7
def show_index():
    engine = EngineFactory.create_engine_to_test_so()
    conn = engine.connect()
    text_sql = 'show index from {table_name}'.format(
        table_name=PostsRecord.__tablename__)
    s = text(text_sql)
    # print each index row returned by SHOW INDEX
    for row in conn.execute(s):
        print(row)
    conn.close()
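An alternative sketch that lists the indexes through SQLAlchemy's inspection API instead of raw SQL, assuming the same engine factory; inspect(engine).get_indexes() returns the index metadata as a list of dictionaries.

from sqlalchemy import inspect

def show_index_via_inspector():
    engine = EngineFactory.create_engine_to_test_so()
    inspector = inspect(engine)
    for index in inspector.get_indexes(PostsRecord.__tablename__):
        # each entry looks like {"name": ..., "column_names": [...], "unique": ...}
        print(index)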
Example #8
def delete_post_test_data_in_test_db():
    """
    删除测试服务器得帖子表数据
    :param num: 要取得测试数据得数目
    :return:
    """
    test_session = EngineFactory.create_session_to_test_so(echo=False)
    PostsRecord.delete_all(test_session)
Example #9
def create_post_test_data_in_test_db(num):
    """
    利用全表服务器在测试服务器上创建新的数据,这个创建得是用户表得测试数据
    :param num: 要取得测试数据得数目
    :return:
    """
    ## 接下来的三行代码从旧的总表中获取想要的数目的数据,作为之后插入的数据源,其实也可以读文件得到,但是那就太麻烦了
    old_session = EngineFactory.create_session_to_databackup_so(echo=False)
    new_session = EngineFactory.create_session_to_test_so(echo=False)
    old_post_list = old_session.query(PostsRecord).limit(num)
    count = 0
    for post in old_post_list:
        new_session.add(post.make_copy())
        count = count + 1
        if count >= 100000:
            new_session.commit()
            count = 0
    new_session.commit()
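Because a plain Query loads every matching row before iteration begins, the loop above keeps all num source rows in memory at once. A memory-bounded sketch, assuming the same sessions and model, that streams the source rows with Query.yield_per() while keeping the same chunked-commit pattern; the helper name is hypothetical.

def create_post_test_data_streaming(num, chunk_size=100000):
    old_session = EngineFactory.create_session_to_databackup_so(echo=False)
    new_session = EngineFactory.create_session_to_test_so(echo=False)

    count = 0
    # yield_per(1000) fetches the source rows in batches of 1000 instead of all at once
    for post in old_session.query(PostsRecord).limit(num).yield_per(1000):
        new_session.add(post.make_copy())
        count += 1
        if count >= chunk_size:
            new_session.commit()
            count = 0
    new_session.commit()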
Example #10
    def __init__(self, session=None):
        if session is None:
            session = EngineFactory.create_session()
        self.session = session
        self.candidate_generator = APICandidateGenerator(session)
        self.scorer_list = []
        self.scorer_list.append(APITypeScorer(weight=35, api_linker=None))
        self.scorer_list.append(
            MethodParametersScorer(weight=50, api_linker=None))
        self.scorer_list.append(ParentAPIScorer(weight=100.0, api_linker=self))
        self.scorer_list.append(DeclarationScorer(weight=60.0,
                                                  api_linker=self))
Example #11
def add_score_index():
    try:
        engine = EngineFactory.create_engine_to_test_so()
        conn = engine.connect()
        index_name = "score_index"
        text_sql = 'alter table {table_name} add index {index_name}(score)'.format(
            table_name=PostsRecord.__tablename__, index_name=index_name)
        s = text(text_sql)
        conn.execute(s)
        conn.close()
    except Exception:
        traceback.print_exc()
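The same index can also be created, and later dropped as in the next example, through SQLAlchemy's schema objects instead of hand-written ALTER TABLE statements. A sketch, assuming PostsRecord maps a score column:

from sqlalchemy import Index

def add_score_index_via_schema():
    engine = EngineFactory.create_engine_to_test_so()
    score_index = Index("score_index", PostsRecord.score)
    score_index.create(bind=engine)  # emits CREATE INDEX for the mapped table
    # score_index.drop(bind=engine)  # would emit the corresponding DROP INDEX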
Example #12
def delete_score_view_count_index():
    try:
        engine = EngineFactory.create_engine_to_test_so()
        conn = engine.connect()
        index_name = "score_view_count_index"
        text_sql = 'alter table `{table_name}` drop index `{index_name}`'.format(
            table_name=PostsRecord.__tablename__, index_name=index_name)
        s = text(text_sql)
        conn.execute(s)
        conn.close()
    except Exception:
        traceback.print_exc()
Example #13
    def start_import(self):
        self.logger = Logger(self.logger_file_name).get_log()
        if not self.session:
            self.session = EngineFactory.create_session()
        self.init_knowledge_table()

        cur = ConnectionFactory.create_cursor_by_knowledge_table(
            self.data_source_knowledge_table)

        select_sql = "select {primary_key_name},{html_column} from {table}".format(
            primary_key_name=self.primary_key_name,
            html_column=self.html_column,
            table=self.table)
        cur.execute(select_sql)
        data_list = cur.fetchall()
        result_tuples = []
        for i in range(0, len(data_list)):
            row_data = data_list[i]
            primary_key = row_data[0]
            html_text = row_data[1]

            if KnowledgeTableColumnMapRecord.exist_import_record(
                    session=self.session,
                    start_knowledge_table=self.data_source_knowledge_table,
                    end_knowledge_table=self.api_html_table,
                    start_row_id=primary_key,
                    start_row_name=self.html_column):
                self.logger.info("%d has already been imported to the new table",
                                 primary_key)
                continue
            api_html_text = self.create_from_one_row_data(
                primary_key, html_text)

            if api_html_text:
                api_html_text = api_html_text.create(self.session,
                                                     autocommit=False)
                result_tuples.append((api_html_text, primary_key))
            else:
                self.logger.warning("No api_html_text for %s", str(row_data))
                continue

            if len(result_tuples) > self.commit_step:
                self.commit_to_server_for_column_map(map_tuples=result_tuples)
                result_tuples = []
        self.commit_to_server_for_column_map(map_tuples=result_tuples)
        self.logger.info("import api html completed!")
        cur.close()
Example #14
            className = dic['className: ']
            param = dic['param: ']
            javadocComment = dic['javadocComment: ']
            blockComment = dic['blockComment: ']

            update_by_className_methodName_param(className, methodName, param,
                                                 code, javadocComment,
                                                 blockComment)
            counter += 1
            if counter > step:
                counter = 0
                session.commit()
        session.commit()


if __name__ == "__main__":
    schema_name = 'domainkg'
    engine = EngineFactory.create_engine_by_schema_name(schema_name)
    session = EngineFactory.create_session(engine=engine, autocommit=False)
    read_json_by_line('traces.json')

    # trace_list = session.query(Traces).filter_by()
    # for trace in trace_list:
    #     try:
    #         className = trace.className
    #         methodName = trace.methodName
    #         param = trace.param
    #         trace.code = 'code'
    #     except Exception:
    #         print("error")
Example #15
    def __init__(self, session=None):
        if session is None:
            session = EngineFactory.create_session()
        self.session = session
Example #16
    def get_so_session(self):
        if not self.session:
            self.session = EngineFactory.create_so_session()

        return self.session
Example #17
class Wikipedia(Base):
    __tablename__ = 'wiki_pedia'
    id = Column(Integer, primary_key=True)  # auto incrementing
    doc_id = Column(Integer, index=True, unique=True, nullable=False)
    url = Column(String(128), index=True, nullable=False)
    title = Column(String(64), index=True, nullable=False)
    content = Column(LONGTEXT())

    def __init__(self, doc_id=None, url=None, title=None, content=None):
        self.doc_id = doc_id
        self.url = url
        self.title = title
        self.content = content

    def __unicode__(self):
        return self.__repr__()

    def __repr__(self):
        return '<Wikipedia: %r: doc_id=%s >' % (self.title, self.doc_id)


if __name__ == "__main__":
    engine = EngineFactory.create_engine_to_wiki()
    metadata = MetaData(bind=engine)
    # drop all tables
    # Base.metadata.drop_all(bind=engine)

    # create the tables
    Base.metadata.create_all(bind=engine)
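Continuing the __main__ block above, a short usage sketch for the model; the session helper name is hypothetical (the original only shows EngineFactory.create_engine_to_wiki), and the row values are example data.

    session = EngineFactory.create_session_to_wiki()  # hypothetical session helper
    page = Wikipedia(doc_id=1,
                     url="https://en.wikipedia.org/wiki/Example",
                     title="Example",
                     content="...")
    session.add(page)
    session.commit()
    print(session.query(Wikipedia).filter_by(doc_id=1).first())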
Example #18
        user.creation_date = self.creation_date
        user.display_name = self.display_name
        user.last_access_date = self.last_access_date
        user.views = self.views
        user.web_site_url = self.web_site_url
        user.location = self.location
        user.about_me = self.about_me
        user.age = self.age
        user.up_votes = self.up_votes
        user.down_votes = self.down_votes
        user.email_hash = self.email_hash

        return user


if __name__ == "__main__":
    engine = EngineFactory.create_engine_to_test_so()
    metadata = MetaData(bind=engine)
    # create all the tables defined by the models
    Base.metadata.create_all(bind=engine)
Example #19
    if api_type_string == "Nested":
        return APIEntity.API_TYPE_CLASS
    if api_type_string == "Required":
        return APIEntity.API_TYPE_FIELD
    if api_type_string == "Optional":
        return APIEntity.API_TYPE_FIELD
    if api_type_string == "Field":
        return APIEntity.API_TYPE_FIELD
    if api_type_string == "Enum":
        return APIEntity.API_TYPE_ENUM_CONSTANTS

    api_type = APIEntity.API_TYPE_UNKNOWN
    return api_type
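The if-chain above can also be written as a lookup table. A sketch, assuming the same APIEntity constants and covering only the cases shown in this excerpt; the names _API_TYPE_MAP and map_api_type are illustrative, not from the original code.

_API_TYPE_MAP = {
    "Nested": APIEntity.API_TYPE_CLASS,
    "Required": APIEntity.API_TYPE_FIELD,
    "Optional": APIEntity.API_TYPE_FIELD,
    "Field": APIEntity.API_TYPE_FIELD,
    "Enum": APIEntity.API_TYPE_ENUM_CONSTANTS,
}

def map_api_type(api_type_string):
    # fall back to UNKNOWN for any unrecognised type string
    return _API_TYPE_MAP.get(api_type_string, APIEntity.API_TYPE_UNKNOWN)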


# if __name__ == "__main__":
#     # create table in 75
#     engine = EngineFactory.create_graphdata_engine_to_center()
#     metadata = MetaData(bind=engine)
#     Base.metadata.create_all(bind=engine)


if __name__ == "__main__":
    engine = EngineFactory.create_engine_to_center()
    metadata = MetaData(bind=engine)
    # drop all tables
    # Base.metadata.drop_all(bind=engine)

    # create the tables
    Base.metadata.create_all(bind=engine)
Example #20
File: model.py  Project: PaiXue/temp
            try:
                return session.query(POIMethod).filter_by(
                    package_name=self.package_name,
                    class_name=self.class_name,
                    return_type=self.return_type,
                    method_name=self.method_name,
                    description=self.description,
                    type=self.type).first()
            except Exception:
                traceback.print_exc()
                session.rollback()
            return None

    def find_or_create(self, session, autocommit=True):
        remote_instance = self.get_remote_object(session)
        if not remote_instance:
            session.add(self)
            if autocommit:
                session.commit()
            return self
        else:
            return remote_instance


if __name__ == "__main__":
    engine = EngineFactory.create_engine_by_schema_name('domainkg')
    metadata = MetaData(bind=engine)

    # create the tables
    Base.metadata.create_all(bind=engine)
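Continuing the __main__ block above, a usage sketch of the find-or-create pattern; the POIMethod constructor arguments are hypothetical example values, since only some of the model's fields are visible in this excerpt.

    session = EngineFactory.create_session(engine=engine, autocommit=False)
    poi = POIMethod(package_name="java.util",  # hypothetical example values
                    class_name="List",
                    return_type="boolean",
                    method_name="add",
                    description="Appends the specified element.",
                    type=1)
    stored = poi.find_or_create(session)  # returns the existing row, or inserts and returns self
    print(stored)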