Example #1
def get_repository_count_query(username=""):
    if username == "":
        query = Repository.select().count()
    else:
        user = User.get(User.username == username)
        query = Repository.select().where(Repository.user == user).count()
    return query
Example #2
def delete_repository_query(repo_id):
    repository = Repository.get_by_id(repo_id)
    task.write_task_status(repo=repository, msg=f"{repository.name} deleting")
    try:
        manager = LinuxRepoManager(repository)
        manager.delete()
        repository.delete_instance()
    except Exception:
        pass
Example #3
def set_next_update_date(repo: Repository):
    date = datetime.datetime.now()
    date += relativedelta(
        years=repo.schedule_year,
        months=repo.schedule_month,
        days=repo.schedule_day,
        hours=repo.schedule_hour,
        minutes=repo.schedule_minute
    )
    repo.schedule_next_update = date
    repo.save()
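The schedule offsets above are added with dateutil's relativedelta rather than a plain timedelta, because timedelta has no notion of calendar months or years. A minimal standalone check of that behaviour (the sample date is made up for illustration):

import datetime
from dateutil.relativedelta import relativedelta

# relativedelta rolls the calendar and clamps to valid month ends,
# which a fixed-length timedelta cannot do.
start = datetime.datetime(2024, 1, 31)
print(start + relativedelta(months=1))         # 2024-02-29 00:00:00
print(start + relativedelta(years=1, days=1))  # 2025-02-01 00:00:00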
Example #4
def save_user_repos(username):
    try:
        user = User.objects.get(user_name=username)
    except User.DoesNotExist:
        user = User(user_name=username)
        user.save()

    repos = get_repos(username)

    for repo in repos:
        try:
            n_repo = Repository.objects.get(github_id=repo['id'])
        except Repository.DoesNotExist:
            n_repo = Repository(github_id=repo['id'], owner=user)
            n_repo.save()
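Example #4 hand-rolls the get-or-create pattern with try/except around DoesNotExist; Django's ORM also offers get_or_create() as a one-call shorthand. A sketch with the same field names as above:

# get_or_create() returns the matching object plus a created flag,
# replacing the try/except blocks above; owner is only set on creation.
user, _ = User.objects.get_or_create(user_name=username)
n_repo, _ = Repository.objects.get_or_create(
    github_id=repo['id'], defaults={'owner': user})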
Example #5
def get_repository_query(repo_id):
    repository = Repository.get_by_id(repo_id)
    return {
        "id": repo_id,
        "name": repository.name,
        "mirror_url": repository.mirror_url,
        "mirror_zpool": repository.mirror_zpool,
        "mirror_location": repository.mirror_location,
        "mirror_type": repository.mirror_type,
        "mirror_args": repository.mirror_args,
        "user": repository.user.username,
        "mirror_init": repository.mirror_init,
        "schedule_status": repository.schedule_status,
        "schedule_run": repository.schedule_run,
        "schedule_number": repository.schedule_number,

        "schedule_minute": repository.schedule_minute,
        "schedule_hour": repository.schedule_hour,
        "schedule_day": repository.schedule_day,
        "schedule_month": repository.schedule_month,
        "schedule_year": repository.schedule_year,

        "created_at": repository.created_at,
        "updated_at": repository.updated_at,
    }
Example #6
def update_repositories():
    counter = 0
    repositories = get_oca_repositories()
    for repository in repositories:
        # start: update repository
        repo = Repository.query.filter(Repository.repository == repository.name).first()
        if repo:
            repo.description = repository.description
            db.session.commit()
        else:
            repo = Repository(repository=repository.name, description=repository.description,
                              in_scope='False')

            db.session.add(repo)
            db.session.commit()
        # end: update repository
        # start: update-functions
        # update modules for repository
        update_modules(repository)
        # count modules per version in repository (not shown in any table)
        count_modules(repository)

        get_installable_and_get_readme(repository)
        get_readme_repository(repository)
        # end: update-functions

        if counter % 5 == 0:
            print('loop {}'.format(counter))
        counter += 1
Example #7
def get_repository_list_query(offset=0, limit=15, username="", my=False):
    repository_list = []
    user = User.get(User.username == username)
    if not my:
        if user.group != 0:
            return "-1"
        query = Repository.select().offset(offset).limit(limit)
    else:
        query = Repository.select().where(Repository.user == user).offset(offset).limit(limit)

    for repository in query:
        repository_list.append({
            "id": repository.get_id(),
            "name": repository.name,
            "user": repository.user.username,
            "schedule_status": repository.schedule_status,
            "schedule_run": repository.schedule_run,
            "updated_at": repository.updated_at
        })
    return repository_list
Example #8
def upload_repositories_to_mongo():
    """
    Upload data about repositories to MongoDB; if we already have info
    about a repo, we just update the existing record.
    """

    lock_upload()

    # GitHub API provides us with 1000 items, so we can paginate through
    # results 10 times.
    for page in range(1, 11):
        # Reset the batch each page so earlier pages are not re-sent
        # by the next bulk_write call.
        repositories = []
        request = get_repositories_by_page(page=page)
        if request.get('message'):
            return request['message']

        for item in request.get('items'):
            repositories.append(
                UpdateOne({'full_name': item['full_name']}, {
                    '$set': {
                        'html_url': item['html_url'],
                        'description': item['description'],
                        'stargazers_count': item['stargazers_count'],
                        'language': item['language']
                    }
                },
                          upsert=True))

        # Perform unordered bulk write to DB.
        try:
            Repository._get_collection().bulk_write(repositories)
        except BulkWriteError as e:
            return str(e)

    # GitHub allows us to make 10 requests per minute, so we need to take
    # a nap.
    sleep(60)

    unlock_upload()
    return 'Success'
Example #9
    def __run(self):
        num_thread = self.config['daemon'].getint('thread', self.args.thread)
        if num_thread < 1:
            num_thread = 1
        # create new process group, become its leader
        # os.setpgrp()
        # Start the worker threads
        for idx in range(num_thread):
            t = WorkerThread(self.config, self.logger, self.task_queue)
            self.threads.append(t)
            t.start()

        db = connect(self.config['database'].get('database_url'))
        db.connect(reuse_if_open=True)
        db.bind([Repository])

        while self.alive:
            self.logger.info("The daemon is still working")
            # repos that are due for an update or have mirror_init = False
            repos_to_update = Repository.select().where(
                (Repository.schedule_next_update <= datetime.now())
                | (Repository.mirror_init == False)).order_by(
                    Repository.schedule_next_update)

            # set the next update date for each repo
            for repo in repos_to_update:
                queries.repository.set_next_update_date(repo)

            # Queue a task request for each repository that needs updating.
            # Cancelling tasks is not needed: a task's timeout is computed from
            # the schedule, so a task never lives longer than its timeout.

            # compute each task's timeout and fill the task_queue
            for repo in repos_to_update:
                timeout = (repo.schedule_next_update -
                           datetime.now()).total_seconds()
                self.task_queue.put(MirrorTask(repo.get_id(), timeout))
            time.sleep(60)

        db.close()
        # Send a shutdown notification to each worker:
        for _ in self.threads:
            self.task_queue.put(None)
        # block until all queued tasks are done
        self.task_queue.join()
        # Join the worker threads
        for t in self.threads:
            t.join()
        self.threads = []
        # The daemon has stopped.
        self.logger.info("The daemon has been stopped")
Example #10
    def test_increment_id(self):
        repository = Repository()
        tweet1 = Tweet("Ah que coucou.")
        repository.add(tweet1)
        self.assertEqual(tweet1.id, 1)
        tweet2 = Tweet("Tirelipimpon sur le Chihuahua.")
        repository.add(tweet2)
        self.assertEqual(tweet2.id, 2)
Example #11
    def __processing_task(self, task):
        try:
            self.logger.info(f'Thread: {self.tid}. Working on {task.id}')

            repo = Repository.get_by_id(task.id)
            # guard against a double update (the previous update has not finished, but the schedule has already triggered a new one)
            already_in_queue = QueueTask.select().where(
                QueueTask.repository == repo).first()
            if already_in_queue:
                if already_in_queue.pid == os.getpid():
                    self.logger.warning(
                        'This repo is currently being updated')
                    return
                self.logger.warning('Repo info is outdated. Refreshing...')
                already_in_queue.delete_instance()
            QueueTask.create(repository=repo, pid=os.getpid())

            try:
                queries.task.write_task_status(repo=repo,
                                               msg="Updating repository")

                out = self.__subprocess_run(
                    args=['update_repo.py', task.id],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    timeout=task.timeout,
                )

                queries.task.write_task_status(repo=repo,
                                               msg=(out.returncode,
                                                    out.stdout[-20000:]))
                self.logger.info(
                    f'Thread: {self.tid}. Finished {task.id} ({out.returncode})'
                )
            except subprocess.TimeoutExpired as e:
                queries.task.write_task_status(repo=repo,
                                               msg="Timeout exception")
                queries.task.write_task_status(repo=repo,
                                               msg=e.output[-20000:])
                self.logger.warning(
                    f'Thread: {self.tid}. Task {task.id} interrupted by timeout'
                )
            finally:
                QueueTask.delete().where(QueueTask.repository == repo).execute()
        except Exception as e:
            s = str(e)
            self.logger.warning(
                f'Thread: {self.tid}. Task {task.id} interrupted by exception: {s}'
            )
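A note on the cleanup in the finally block above: peewee's delete() only builds a query object, and no rows are removed until execute() is called on it. A minimal sketch (same models as above):

# Without .execute() the DELETE statement is built but never sent;
# execute() returns the number of rows removed.
removed = QueueTask.delete().where(QueueTask.repository == repo).execute()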
Example #12
File: run.py  Project: khanhtdk/countpy
    def __init__(self, search_order=None, verbose=False, mode=None,
                 anonymous=False, threads=None, logfile=None):
        self.slices = None
        self.repos = None
        self._logger = None
        self._run_event = Event()

        # Anonymous or authenticated access
        if anonymous:
            threads = int(threads) if threads else 1
            auths = [(None, None)] * threads
        else:
            auths = config.items('credentials')
            assert auths, 'No Github credential found'

        # Prepare search time slices
        mode = mode or 'both'
        if mode == 'both' or mode == 'search-only':
            self.slices = Queue()
            period = config.get('search_period', 'period')
            window = config.get('search_period', 'slice')
            reverse = search_order.lower() == 'desc' if search_order else None
            for time_slice in slice_period(period, window, reverse):
                self.slices.put_nowait(time_slice)

        # Query all local repositories
        if mode == 'both' or mode == 'retrieve-only':
            self.repos = Queue()
            for repo in Repository.query_all(name_only=True):
                self.repos.put_nowait(repo)

        # Init log server
        self._log_queue = Queue()
        self.logsrv = LogServer(self._log_queue, verbose, logfile)

        # Init search workers
        self._exc_queue = Queue()
        self.workers = [
            SearchWorker(user, passwd, self.slices, self.repos,
                         self._run_event, self._log_queue, self._exc_queue)
            for user, passwd in auths
        ]

        # Init snapshot method
        self.snapshot = Snapshot()
Example #13
def create_repository_query(json_repository, username):
    user = User.get(User.username == username)

    repository = Repository.create(
        name=json_repository["name"],
        mirror_url=json_repository["mirror_url"],
        mirror_zpool=json_repository["mirror_zpool"],
        mirror_location=json_repository["mirror_location"],
        mirror_type=json_repository["mirror_type"],
        mirror_args=json_repository["mirror_args"],

        user=user,

        schedule_status=json_repository["schedule_status"],
        schedule_number=json_repository["schedule_number"],

        schedule_minute=json_repository["schedule_minute"],
        schedule_hour=json_repository["schedule_hour"],
        schedule_day=json_repository["schedule_day"],
        schedule_month=json_repository["schedule_month"],
        schedule_year=json_repository["schedule_year"]
    )
    task.write_task_status(repo=repository, msg=f"{repository.name} creation")
Example #14
def init_mirror(repo: Repository):
    repo.mirror_init = True
    repo.save()
Example #15
import sys

from playhouse.db_url import connect

import api_config
from app.models import Repository
from mirror.repository import LinuxRepoManager
from queries.repository import init_mirror


def run(repository: Repository):
    manager = LinuxRepoManager(repository)
    if not repository.mirror_init:
        if manager.filesystem_exist():
            manager.delete()
        manager.create()
        init_mirror(repository)
    # regular update process
    manager.update()


if __name__ == '__main__':
    repo_id = sys.argv[1]
    # connect to the database
    db = connect(api_config.DATABASE_URL, reuse_if_open=True)
    # bind the ORM model to the database
    db.bind([Repository])
    repo = Repository.get_by_id(repo_id)
    run(repo)
    db.close()
Example #16
def edit_repository_query(repo_id, json_repository):
    repository = Repository.get_by_id(repo_id)

    if repository_exist(json_repository['name']) and repository.name != json_repository['name']:
        return "-1"

    repository.name = json_repository["name"]
    repository.mirror_url = json_repository["mirror_url"]
    repository.mirror_zpool = json_repository["mirror_zpool"]
    repository.mirror_location = json_repository["mirror_location"]
    repository.mirror_type = json_repository["mirror_type"]
    repository.mirror_args = json_repository["mirror_args"]

    repository.schedule_status = json_repository["schedule_status"]
    repository.schedule_run = json_repository["schedule_run"]
    repository.schedule_number = json_repository["schedule_number"]

    repository.schedule_minute = json_repository["schedule_minute"]
    repository.schedule_hour = json_repository["schedule_hour"]
    repository.schedule_day = json_repository["schedule_day"]
    repository.schedule_month = json_repository["schedule_month"]
    repository.schedule_year = json_repository["schedule_year"]

    repository.updated_at = datetime.datetime.now()
    repository.save()

    task.write_task_status(repo=repository, msg=f"{repository.name} editing")
    return 0
Example #17
    def test_instance_variables(self):
        repository = Repository()
        self.assertEqual(len(repository.tweets), 0)
Example #18
def repository_exist(name):
    try:
        Repository.get(Repository.name == name)
    except DoesNotExist:
        return False
    return True
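The try/except around DoesNotExist above is the classic peewee existence check; peewee 3 also provides get_or_none(), which collapses it into a single call. A sketch using the same model:

def repository_exist(name):
    # get_or_none() returns None instead of raising DoesNotExist.
    return Repository.get_or_none(Repository.name == name) is not None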
Example #19
from app.models import PageView
from app.models import Repository
from app.utils import request_to_record
from flask import Flask
from flask import request
from flask_debugtoolbar import DebugToolbarExtension
from flask_login import LoginManager
from flask_migrate import Migrate
from slack_time import get_slack_time

login_manager = LoginManager()
toolbar = DebugToolbarExtension()
migrate = Migrate()

event_system = EventSystem()
repo = Repository()

slack = get_slack_time()


def create_app():
    app = Flask(__name__)
    configure_app(app)

    db.init_app(app)
    toolbar.init_app(app)
    login_manager.init_app(app)
    event_system.init_app(app)
    migrate.init_app(app, db)
    repo.init_db(db.session)
Example #20
    def test_get(self):
        repository = Repository()
        tweet = Tweet("Le gras c'est la vie.")
        repository.add(tweet)
        self.assertEqual(tweet, repository.get(1))
        self.assertIsNone(repository.get(2))
Example #21
    def search_repos_in_slice(self, time_slice):
        self._logger.info('Searching time slice: %s' % time_slice)
        self._search.search(created=time_slice)
        for repo in self._search.traverse():
            assert self.is_running()
            if Repository.exists(repo.full_name):
                self._logger.info(
                    self._repo_fmt.format(label='Existed', **repo.__dict__))
                continue
            self._logger.info(
                self._repo_fmt.format(label='Found', **repo.__dict__))

            # Create the new repo in the database
            newrepo = Repository(repo.full_name)
            newrepo.set_id(repo.id)
            newrepo.set_url(repo.url)
            newrepo.set_contents_url(repo.contents_url)
            newrepo.commit_changes()

            # Queue repository for later retrieving
            if self._repos is not None:
                self._repos.put(repo.full_name)
Example #22
def reset_repository_query(repo_id):
    repository = Repository.get_by_id(repo_id)
    task.write_task_status(repo=repository, msg=f"{repository.name} resetting")
Example #23
    def test_add(self):
        repository = Repository()
        tweet = Tweet("Coucou petite perruche.")
        repository.add(tweet)
        self.assertEqual(len(repository.tweets), 1)
Example #24
    def retrieve_files_in_repo(self, repo_name):
        repo = Repository(repo_name)

        # Skip if repository was done already
        if repo.retrieved:
            self._logger.info(
                self._repo_fmt.format(label='Already done:',
                                      full_name=repo.name,
                                      id=repo.id,
                                      url=repo.url))
            return

        # Skip if repository has no contents URL
        if not repo.contents_url:
            self._logger.info(
                self._repo_fmt.format(label='No contents URL found:',
                                      full_name=repo.name,
                                      id=repo.id,
                                      url=repo.url))
            return

        # Do retrieving contents from GitHub
        self._logger.info(
            self._repo_fmt.format(label='Retrieving:',
                                  full_name=repo.name,
                                  id=repo.id,
                                  url=repo.url))

        added = False
        for file in self._retriever.traverse(repo.contents_url):
            assert self.is_running()
            if not Repository.expects_file(file.path):
                self._logger.info('  (-) %s' % file.path)
                continue
            self._logger.info('  (+) %s' % file.path)
            self._retriever.retrieve_content(file)
            repo.add_file(file.path, file.decoded_content)
            added = True

        # Find packages if files found
        if added:
            self._logger.info('  --> Finding packages...')
            repo.find_packages()

        # Do nothing if no file found
        else:
            self._logger.info('  --> No expected files found.')

        # Save repository
        self._logger.info('  --> Saving repository...')
        repo.set_retrieved(True)
        repo.commit_changes()
Example #25
def deinit_mirror(repo: Repository):
    repo.mirror_init = False
    repo.save()