Exemplo n.º 1
0
class Race(SlugModel):
    """
    Race model: one contest (office + seat) in one state, carrying AP and
    NPR call data plus precinct-level reporting numbers.
    """
    slug_fields = ['state_postal', 'office_name', 'seat_name']

    slug = CharField(max_length=255)

    # data from init
    race_id = CharField(primary_key=True)
    state_postal = CharField(max_length=255)
    office_id = CharField(max_length=255)
    office_name = CharField(max_length=255)
    seat_name = CharField(null=True)
    seat_number = IntegerField(null=True)
    race_type = CharField()
    last_updated = DateTimeField(null=True)       # Stored as EST (US/Eastern)

    # data from update
    precincts_total = IntegerField(null=True)
    precincts_reporting = IntegerField(null=True)
    office_description = CharField(null=True)
    uncontested = BooleanField(default=False)
    is_test = BooleanField(default=False)
    number_in_runoff = CharField(null=True)

    # call data
    accept_ap_call = BooleanField(default=True)
    ap_called = BooleanField(default=False)
    ap_called_time = DateTimeField(null=True)     # Stored as UTC
    npr_called = BooleanField(default=False)
    npr_called_time = DateTimeField(null=True)    # Stored as UTC

    # NPR data
    poll_closing_time = DateTimeField(null=True)  # Stored as EST (US/Eastern)
    featured_race = BooleanField(default=False)
    previous_party = CharField(max_length=5, null=True)
    obama_gop = BooleanField(default=False)
    romney_dem = BooleanField(default=False)
    bluedog = BooleanField(default=False)
    female_candidate = BooleanField(default=False)
    female_incumbent = BooleanField(default=False)
    rematch_result = TextField(null=True, default=None)
    rematch_description = TextField(null=True, default=None)
    freshmen = BooleanField(default=False)
    ballot_measure_description = CharField(max_length=255, null=True)

    def __unicode__(self):
        return u'%s: %s-%s' % (
            self.office_name,
            self.state_postal,
            self.seat_name
        )

    @staticmethod
    def _party_slug(candidate):
        """
        Map a candidate's party code ('GOP'/'Dem'/anything else) to the
        lowercase slug used by callers of get_winning_party().
        """
        if candidate.party == 'GOP':
            return 'gop'
        elif candidate.party == 'Dem':
            return 'dem'
        return 'other'

    def get_winning_party(self):
        """
        Return the winning party slug ('gop'/'dem'/'other') in this race,
        or None if the race has not been called.
        """
        if self.is_called():
            for candidate in self.candidates.where(Candidate.race == self):
                # Prefer the AP winner flag when we accept AP calls;
                # otherwise (or when AP hasn't flagged anyone) fall back
                # to NPR's winner flag.
                if self.accept_ap_call and candidate.ap_winner:
                    return self._party_slug(candidate)

                if candidate.npr_winner:
                    return self._party_slug(candidate)

        return None

    def is_runoff(self):
        """
        Did the race lead to a runoff?
        """
        return bool(self.accept_ap_call and self.number_in_runoff)

    def get_runoff_winners(self):
        """
        Get candidates who will appear in a runoff, or None when there is
        no AP-reported runoff.
        """
        if self.accept_ap_call and self.number_in_runoff:
            return self.candidates.where(Candidate.ap_runoff_winner == True)
        else:
            return None

    def is_called(self):
        """
        Has this race been called? Uses the AP call when accept_ap_call is
        set, otherwise NPR's own call.
        """
        if self.accept_ap_call:
            return self.ap_called
        return self.npr_called

    def is_reporting(self):
        """
        Are precincts reporting?
        """
        return bool(self.precincts_reporting)

    def party_changed(self):
        """
        Did the winning party differ from previous_party?
        Returns None when the race has no winner yet.
        """
        winner = self.get_winning_party()

        if winner:
            return winner != self.previous_party

        return None

    def get_called_time(self):
        """
        Get when this race was called (AP or NPR time, matching is_called).
        """
        if self.accept_ap_call:
            return self.ap_called_time
        else:
            return self.npr_called_time

    def precincts_reporting_percent(self):
        """
        Get percent of precincts reporting (0 when the total is missing
        or zero).
        """
        # Guard against both NULL and 0 totals; `None > 0` raises on py3.
        if not self.precincts_total:
            return 0
        ratio = Decimal(self.precincts_reporting or 0) / Decimal(self.precincts_total)
        return ratio * 100

    def has_incumbent(self):
        """
        Check if this Race has an incumbent candidate.
        """
        return bool(self.candidates.where(Candidate.incumbent == True).count())

    def count_votes(self):
        """
        Count the total votes cast for all candidates.
        """
        return self.candidates.select(fn.Sum(Candidate.vote_count)).scalar()

    def flatten(self, update_only=False):
        """
        Serialize this race (and its candidates) to a plain dict.
        With update_only=True, only frequently-changing fields are included.
        """
        UPDATE_FIELDS = [
            'id',
            'precincts_total',
            'precincts_reporting',
            'number_in_runoff'
        ]

        INIT_FIELDS = [
            'slug',
            'state_postal',
            'office_name',
            'seat_name',
            'seat_number',
            'race_type',
            'last_updated',
            'office_description',
            'uncontested',
            'featured_race',
            'poll_closing_time',
        ]

        flat = {
            'candidates': []
        }

        for field in UPDATE_FIELDS:
            flat[field] = getattr(self, field)

        flat['called'] = self.is_called()
        flat['called_time'] = self.get_called_time()

        if not update_only:
            for field in INIT_FIELDS:
                flat[field] = getattr(self, field)

        for candidate in self.candidates:
            data = candidate.flatten(update_only=update_only)

            # Winner flag mirrors get_winning_party(): AP first when
            # accepted, NPR otherwise.
            if self.accept_ap_call and candidate.ap_winner:
                data['winner'] = True
            elif candidate.npr_winner:
                data['winner'] = True
            else:
                data['winner'] = False

            flat['candidates'].append(data)

        return flat

    def is_uncontested(self):
        """
        Return True if exactly one candidate is running.
        """
        return self.candidates.count() == 1

    def top_candidates(self):
        """
        Return (dem, gop) pair of top candidates.
        """
        if self.race_id in USE_LEADING_CANDIDATES:
            candidates = USE_LEADING_CANDIDATES[self.race_id]
            return self._leading_top_candidates(candidates)
        else:
            return self._static_top_candidates()

    def _leading_top_candidates(self, candidates):
        """
        Get top candidates using total votes.

        NOTE(review): assumes the query yields one 'dem', one 'gop' and one
        'other' candidate; otherwise the locals below may be unbound.
        """
        candidates_query = self.candidates\
            .where(self.candidates.model_class.candidate_id << candidates)\
            .order_by(self.candidates.model_class.vote_count.desc())

        ordered = []
        for candidate in candidates_query:
            if candidate.get_party() == 'dem':
                dem = candidate
            if candidate.get_party() == 'gop':
                gop = candidate
            if candidate.get_party() == 'other':
                other = candidate

            ordered.append(candidate)

        # Drop whichever party is trailing (last in vote order) and return
        # the two leaders.
        if ordered[-1].get_party() == 'other':
            return (dem, gop)
        if ordered[-1].get_party() == 'gop':
            return (dem, other)
        if ordered[-1].get_party() == 'dem':
            return (other, gop)

    def _static_top_candidates(self):
        """
        Get top candidates using dem, gop pattern (with possibility for
        per-race overrides). Missing candidates come back as None.
        """
        try:
            if self.race_id in DEMOCRAT_OVERRIDES:
                candidate_id = DEMOCRAT_OVERRIDES[self.race_id]
                dem = self.candidates.where(self.candidates.model_class.candidate_id == candidate_id).get()
            else:
                dem = self.candidates.where(self.candidates.model_class.party == "Dem").get()
        except Candidate.DoesNotExist:
            dem = None

        try:
            if self.race_id in REPUBLICAN_OVERRIDES:
                candidate_id = REPUBLICAN_OVERRIDES[self.race_id]
                gop = self.candidates.where(self.candidates.model_class.candidate_id == candidate_id).get()
            else:
                gop = self.candidates.where(self.candidates.model_class.party == "GOP").get()
        except Candidate.DoesNotExist:
            gop = None

        return (dem, gop)

    def top_choices(self):
        """
        Return (yes, no) or (for, against) pair for a ballot measure.
        """
        yes = self.candidates.where((self.candidates.model_class.last_name == 'Yes') | (self.candidates.model_class.last_name == 'For')).get()
        no = self.candidates.where((self.candidates.model_class.last_name == 'No') | (self.candidates.model_class.last_name == 'Against')).get()
        return (yes, no)

    @classmethod
    def recently_called(cls, delta=15):
        """
        Get races called in the last <delta> minutes, most recent first.
        """
        now = datetime.utcnow()
        then = now - timedelta(minutes=delta)
        recent = cls.select().where(
                ((cls.npr_called == True) & (cls.npr_called_time > then)) |
                ((cls.accept_ap_call == True) & (cls.ap_called == True) & (cls.ap_called_time > then))
            )\
            .order_by(cls.npr_called_time.desc(), cls.ap_called_time.desc())
        return recent
Exemplo n.º 2
0
class User(UserMixin, Model):
    """
    Application user account with password hashing and token auth helpers.
    """
    id = AutoField()
    username = CharField(unique=True)
    email = CharField(unique=True)
    password = CharField(max_length=100)
    joined_at = DateTimeField(default=datetime.datetime.now)
    is_admin = BooleanField(default=False)

    class Meta:
        database = DATABASE
        order_by = ('joined_at', )

    def get_entries(self):
        """Return all Entry rows authored by this user."""
        return Entry.select().where(Entry.user == self)

    @classmethod
    def create_user(cls, username, email, password, admin=False):
        """
        Create a user inside a transaction and return it.

        Raises ValueError when the username or email already exists.
        """
        email = email.lower()
        try:
            with DATABASE.transaction():
                return cls.create(username=username,
                                  email=email,
                                  password=HASHER.hash(password),
                                  is_admin=admin)
        except IntegrityError:
            raise ValueError('User already exists')

    @classmethod
    def create_api_user(cls, username, email, password, admin=False, **kwargs):
        """
        Create a user via the API, rejecting duplicate email/username.
        """
        email = email.lower()
        try:
            cls.select().where((cls.email == email)
                               | (cls.username**username)).get()
        except cls.DoesNotExist:
            user = cls(username=username, email=email)
            # set_password is a two-arg staticmethod whose first argument is
            # ignored; pass the instance rather than the class for clarity.
            user.password = user.set_password(user, password)
            user.is_admin = admin
            user.save()
            return user
        else:
            raise Exception("User with that email or username exists.")

    @staticmethod
    def verify_auth_token(token):
        """Return the User encoded in *token*, or None if invalid/expired."""
        serializer = Serializer(app.config["SECRET_KEY"])
        try:
            data = serializer.loads(token)
        except (SignatureExpired, BadSignature):
            return None
        else:
            user = User.get(User.id == data['id'])
            return user

    @staticmethod
    def set_password(self, password):
        """
        Hash *password*.

        NOTE(review): declared @staticmethod yet takes a `self` parameter,
        so every call must pass an (unused) first argument. Kept as-is for
        call-site compatibility.
        """
        return HASHER.hash(password)

    def verify_password(self, password):
        """Check *password* against the stored hash."""
        return HASHER.verify(self.password, password)

    def generate_auth_token(self, expires=3600):
        """Return a signed token embedding this user's id."""
        serializer = Serializer(app.config["SECRET_KEY"], expires_in=expires)
        return serializer.dumps({'id': self.id})
Exemplo n.º 3
0
class Exercise(BaseModel):
    """An exercise belonging to a course, ordered and optionally archived."""

    subject = CharField()
    date = DateTimeField()
    users = ManyToManyField(User, backref='exercises')
    is_archived = BooleanField(default=False, index=True)
    due_date = DateTimeField(null=True)
    notebook_num = IntegerField(default=0)
    order = IntegerField(default=0, index=True)
    course = ForeignKeyField(Course, backref='exercise')
    number = IntegerField(default=1)

    class Meta:
        indexes = ((('course_id', 'number'), True), )

    def open_for_new_solutions(self) -> bool:
        """Whether new solutions may still be submitted."""
        if self.is_archived:
            return False
        if self.due_date is None:
            return True
        return datetime.now() < self.due_date

    @classmethod
    def get_highest_number(cls, course: Course):
        """Highest exercise number in *course* (None when it has none)."""
        query = (
            cls
            .select(fn.MAX(cls.number))
            .where(cls.course == course)
            .group_by(cls.course)
        )
        return query.scalar()

    @classmethod
    def is_number_exists(cls, course: Course, number: int) -> bool:
        """True when *course* already has an exercise with *number*."""
        query = cls.select().where(cls.course == course, cls.number == number)
        return query.exists()

    @classmethod
    def get_objects(
        cls,
        user_id: int,
        fetch_archived: bool = False,
        from_all_courses: bool = False,
    ):
        """Exercises visible to *user_id*, ordered by course date and number."""
        user = User.get(User.id == user_id)
        query = (
            cls
            .select()
            .join(Course)
            .join(UserCourse)
            .where(UserCourse.user == user_id)
            .switch()
            .order_by(UserCourse.date, Exercise.number, Exercise.order)
        )
        if not from_all_courses:
            # Restrict to the course the user viewed last.
            query = query.where(UserCourse.course == user.last_course_viewed, )
        if not fetch_archived:
            query = query.where(cls.is_archived == False)  # NOQA: E712
        return query

    def as_dict(self) -> Dict[str, Any]:
        """Plain-dict representation for serialization."""
        return {
            'exercise_id': self.id,
            'exercise_name': self.subject,
            'is_archived': self.is_archived,
            'notebook': self.notebook_num,
            'due_date': self.due_date,
            'exercise_number': self.number,
            'course_id': self.course.id,
            'course_name': self.course.name,
        }

    @staticmethod
    def as_dicts(exercises: Iterable['Exercise']) -> ExercisesDictById:
        """Map exercise id -> as_dict() for every exercise given."""
        return {item.id: item.as_dict() for item in exercises}

    def __str__(self):
        return self.subject
Exemplo n.º 4
0
class JobPost(BaseModel):
    """
    A job posting with moderation state and contact details.
    """
    title = TextField(null=False,
                      help_text="Title of the job post",
                      verbose_name="Title")
    summary = TextField(null=True,
                        help_text="Summary of the job post",
                        verbose_name="Summary")
    content = TextField(null=False,
                        help_text="Content of the job post",
                        verbose_name="Content")
    dt_submitted = DateTimeField(
        null=False,
        default=datetime.now,
        help_text="When was the job post submitted",
        verbose_name="Datetime Submitted",
        index=True,
    )
    dt_updated = DateTimeField(null=False,
                               default=datetime.now,
                               help_text="When was the job post updated",
                               verbose_name="Datetime Updated")
    dt_published = DateTimeField(null=True,
                                 help_text="When was the job post published",
                                 verbose_name="Datetime Published")
    state = CharField(
        null=False,
        default="waiting",
        choices=[("waiting", "waiting"), ("published", "published"),
                 ("rejected", "rejected")],
        help_text="Current state of the job post",
        verbose_name="State",
    )
    approved_by = ForeignKeyField(
        AdminUser,
        null=True,
        default=None,
        backref="adminuser",
        help_text="Who approved the job post",
        verbose_name="Approved by",
    )

    company = CharField(null=False,
                        help_text="Company that posted the job",
                        verbose_name="Company")
    phone = CharField(null=True,
                      help_text="Phone number to contact",
                      verbose_name="Phone Number")
    location = CharField(null=False,
                         help_text="Where is the job located",
                         verbose_name="Job Location")
    email = CharField(null=True,
                      help_text="Email to contact",
                      verbose_name="Email Address")
    contact_info = CharField(null=False,
                             help_text="Person to contact",
                             verbose_name="Contact info")
    image_path = CharField(null=True,
                           help_text="Image for the job post",
                           verbose_name="Image Path in filesystem")

    @classmethod
    def create(
        cls,
        title: str,
        content: str,
        company: str,
        location: str,
        contact_info: str,
        email: Optional[str] = None,
        phone: Optional[str] = None,
        summary: Optional[str] = None,
        dt_submitted: Optional[datetime] = None,
        dt_updated: Optional[datetime] = None,
        dt_published: Optional[datetime] = None,
        state: str = "waiting",
        approved_by: Optional[AdminUser] = None,
        image_path: Optional[str] = None,
    ):
        """
        Create and persist a new job post, defaulting timestamps to now.

        Raises ValueError when neither email nor phone is supplied.
        """
        if not dt_submitted:
            dt_submitted = datetime.now()
        if not dt_updated:
            dt_updated = datetime.now()

        if not email and not phone:
            raise ValueError("One of email or phone must be provided")

        # Model.create() already INSERTs the row; the previous extra
        # save() call issued a redundant second write.
        return super().create(
            title=title,
            content=content,
            company=company,
            location=location,
            contact_info=contact_info,
            email=email,
            phone=phone,
            summary=summary,
            dt_submitted=dt_submitted,
            dt_updated=dt_updated,
            dt_published=dt_published,
            state=state,
            approved_by=approved_by,
            image_path=image_path,
        )
Exemplo n.º 5
0
    class DbFactFinData(ModelBase):
        """
        Financial factor data for database storage.

        One row per (stock, date, factor); the unique index is on
        (stock_id, date, factor_category, factor_name, factor_type).
        """

        id = AutoField()
        stock_id: str = CharField(max_length=10)
        date: datetime = DateTimeField()
        date_report: datetime = DateTimeField()

        factor_category: str = CharField(max_length=50)
        factor_name: str = CharField(max_length=50)
        factor_name_chinese: str = CharField()
        factor_value: float = FloatField(null=True)
        factor_type: str = CharField(max_length=20)

        # Timestamp of the last write to this row (set in from_factor).
        datetime_update: datetime = DateTimeField()

        class Meta:
            database = db
            indexes = ((("stock_id", "date", "factor_category", "factor_name", "factor_type"), True),)

        @staticmethod
        def from_factor(factor: FactorData, DataClass: type) -> "ModelBase":
            """
            Generate a DataClass row object from a FactorData, stamping
            datetime_update with the current time.
            """

            db_bar = DataClass()

            db_bar.stock_id = factor.stock_id
            db_bar.date = factor.date  # announcement date
            db_bar.date_report = factor.date_report  # reporting-period date

            db_bar.factor_category = factor.factor_category
            db_bar.factor_name = factor.factor_name
            db_bar.factor_name_chinese = factor.factor_name_chinese
            db_bar.factor_value = factor.factor_value
            db_bar.factor_type = factor.factor_type

            db_bar.datetime_update = datetime.now()

            return db_bar

        def to_bar(self):
            """
            Generate a FactorData object from this row.

            NOTE(review): stub — returns an empty FactorData without copying
            any of this row's fields.
            """
            factor = FactorData()
            return factor

        @staticmethod
        def save_all(objs: List[ModelBase], DataClass: ModelBase):
            """
            Save a list of objects, updating rows that already exist.
            """
            dicts = map(lambda x: x.to_dict(), objs)
            with db.atomic():
                if driver is Driver.POSTGRESQL:
                    # Per-row upsert. NOTE(review): the conflict target is
                    # (stock_id, date) while the unique index above has five
                    # columns — confirm this matches the actual constraint.
                    for bar in dicts:
                        DataClass.insert(bar).on_conflict(
                            update=bar,
                            conflict_target=(
                                DataClass.stock_id,
                                DataClass.date,
                            ),
                        ).execute()
                else:
                    # Other backends: bulk REPLACE in chunks, with progress
                    # and per-chunk timing printed to stdout.
                    i = 1
                    num = 5000
                    for c in chunked(dicts, num):
                        sta = time.time()
                        print(f"Insert data to database {DataClass.__name__}: {i}-{i + num - 1}")
                        DataClass.insert_many(c).on_conflict_replace().execute()
                        print(time.time() - sta)
                        i += num

        def query_data(self, factor_name: str):
            """
            Load all values of *factor_name* as a DataFrame (date, stock_id,
            <factor_name>), or None when no rows match.

            NOTE(review): factor_name is interpolated directly into the SQL
            (both as a column alias and in the WHERE clause) — SQL injection
            risk if it ever comes from untrusted input.
            """
            factor_sql = f"SELECT DATE_FORMAT(`date`,'%Y-%m-%d') as `date`, stock_id, factor_value as {factor_name} " \
                         f"FROM dbfactfindata " \
                         f"WHERE factor_name = '{factor_name}' "  # TODO: name handling
            res = pd.read_sql(factor_sql, con=MySQL_con)
            return None if res.empty else res
Exemplo n.º 6
0
Arquivo: models.py Projeto: vixus0/wit
class Code(BaseModel):
    """A pass phrase issued to a witness, tracking when it was generated and used."""
    witness = ForeignKeyField(Witness, backref='codes')  # owner of this code
    phrase = CharField(index=True, default=generate_phrase)  # auto-generated pass phrase
    generated = DateTimeField(default=datetime.now)  # creation timestamp
    used = DateTimeField(null=True)  # set when redeemed; NULL while unused
Exemplo n.º 7
0
    class DbTickData(ModelBase):
        """
        Tick data for database storage.

        Index is defined unique with (datetime, symbol)
        """

        id = AutoField()

        symbol: str = CharField()
        exchange: str = CharField()
        datetime: datetime = DateTimeField()

        name: str = CharField()
        volume: float = FloatField()
        last_price: float = FloatField()
        last_volume: float = FloatField()
        limit_up: float = FloatField()
        limit_down: float = FloatField()

        open_price: float = FloatField()
        high_price: float = FloatField()
        low_price: float = FloatField()
        pre_close: float = FloatField()

        bid_price_1: float = FloatField()
        bid_price_2: float = FloatField(null=True)
        bid_price_3: float = FloatField(null=True)
        bid_price_4: float = FloatField(null=True)
        bid_price_5: float = FloatField(null=True)

        ask_price_1: float = FloatField()
        ask_price_2: float = FloatField(null=True)
        ask_price_3: float = FloatField(null=True)
        ask_price_4: float = FloatField(null=True)
        ask_price_5: float = FloatField(null=True)

        bid_volume_1: float = FloatField()
        bid_volume_2: float = FloatField(null=True)
        bid_volume_3: float = FloatField(null=True)
        bid_volume_4: float = FloatField(null=True)
        bid_volume_5: float = FloatField(null=True)

        ask_volume_1: float = FloatField()
        ask_volume_2: float = FloatField(null=True)
        ask_volume_3: float = FloatField(null=True)
        ask_volume_4: float = FloatField(null=True)
        ask_volume_5: float = FloatField(null=True)

        class Meta:
            database = db
            indexes = ((("datetime", "symbol", "exchange"), True), )

        @staticmethod
        def from_tick(tick: TickData):
            """
            Generate DbTickData object from TickData.
            """
            db_tick = DbTickData()

            # Level-1 fields and basic quote data are copied unconditionally.
            for attr in (
                "symbol", "datetime", "name", "volume", "last_price",
                "last_volume", "limit_up", "limit_down", "open_price",
                "high_price", "low_price", "pre_close",
                "bid_price_1", "ask_price_1", "bid_volume_1", "ask_volume_1",
            ):
                setattr(db_tick, attr, getattr(tick, attr))
            db_tick.exchange = tick.exchange.value

            # Depth levels 2-5 are copied only when level-2 bid data exists.
            if tick.bid_price_2:
                for level in range(2, 6):
                    for side in ("bid_price", "ask_price", "bid_volume", "ask_volume"):
                        field = f"{side}_{level}"
                        setattr(db_tick, field, getattr(tick, field))

            return db_tick

        def to_tick(self):
            """
            Generate TickData object from DbTickData.
            """
            tick = TickData(
                symbol=self.symbol,
                exchange=Exchange(self.exchange),
                datetime=self.datetime,
                name=self.name,
                volume=self.volume,
                last_price=self.last_price,
                last_volume=self.last_volume,
                limit_up=self.limit_up,
                limit_down=self.limit_down,
                open_price=self.open_price,
                high_price=self.high_price,
                low_price=self.low_price,
                pre_close=self.pre_close,
                bid_price_1=self.bid_price_1,
                ask_price_1=self.ask_price_1,
                bid_volume_1=self.bid_volume_1,
                ask_volume_1=self.ask_volume_1,
                gateway_name="DB",
            )

            # Restore depth levels 2-5 only when they were stored.
            if self.bid_price_2:
                for level in range(2, 6):
                    for side in ("bid_price", "ask_price", "bid_volume", "ask_volume"):
                        field = f"{side}_{level}"
                        setattr(tick, field, getattr(self, field))

            return tick

        @staticmethod
        def save_all(objs: List["DbTickData"]):
            """Persist all ticks, updating any rows that already exist."""
            rows = [obj.to_dict() for obj in objs]
            with db.atomic():
                if driver is Driver.POSTGRESQL:
                    # Postgres: per-row upsert on the unique index columns.
                    for row in rows:
                        (DbTickData
                            .insert(row)
                            .on_conflict(
                                update=row,
                                conflict_target=(
                                    DbTickData.datetime,
                                    DbTickData.symbol,
                                    DbTickData.exchange,
                                ),
                            )
                            .execute())
                else:
                    # Other backends: bulk replace in batches of 50.
                    for batch in chunked(rows, 50):
                        DbTickData.insert_many(batch).on_conflict_replace().execute()
Exemplo n.º 8
0
class Talk(Model):
    """
    A conversation between two strangers, tracking per-partner message
    counts and begin/end times.
    """
    partner1 = ForeignKeyField(Stranger, related_name='talks_as_partner1')
    partner1_sent = IntegerField(default=0)
    partner2 = ForeignKeyField(Stranger, related_name='talks_as_partner2')
    partner2_sent = IntegerField(default=0)
    searched_since = DateTimeField()
    begin = DateTimeField(default=datetime.datetime.utcnow)
    end = DateTimeField(index=True, null=True)

    class Meta:
        database = DATABASE_PROXY

    @classmethod
    def delete_old(cls, before):
        """Delete talks that ended before *before*."""
        # Consistency: use cls rather than the hard-coded class name.
        cls.delete().where(cls.end < before).execute()

    @classmethod
    def get_ended_talks(cls, after=None):
        """
        Talks that have ended. When *after* is given, only those that
        ended at or after it.
        """
        talks = cls.select()
        if after is None:
            # pylint: disable=singleton-comparison
            talks = talks.where(cls.end != None)
        else:
            talks = talks.where(cls.end >= after)
        return talks

    @classmethod
    def get_last_partners_ids(cls, stranger_id):
        """Args:
            stranger_id (int)

        Yields:
            int: IDs of last partners.

        """
        talks = cls.select() \
            .where((cls.partner1_id == stranger_id) | (cls.partner2_id == stranger_id))

        for talk in talks:
            yield talk.get_partner_id(stranger_id)

    @classmethod
    def get_not_ended_talks(cls, after=None):
        """Talks still in progress, optionally begun at or after *after*."""
        # pylint: disable=singleton-comparison
        talks = cls.select().where(cls.end == None)
        if after is not None:
            talks = talks.where(cls.begin >= after)
        return talks

    @classmethod
    def get_talk_by_partner_id(cls, stranger_id):
        """Return the ongoing talk involving *stranger_id*, or None."""
        try:
            # pylint: disable=singleton-comparison
            talk = cls.get(
                ((cls.partner1_id == stranger_id) | (cls.partner2_id == stranger_id)) &
                (cls.end == None),
                )
        except DoesNotExist:
            return None
        else:
            return talk

    def get_partner_id(self, stranger_id):
        """Return the other participant's id.

        Raises:
            WrongStrangerError: If given stranger isn't a partner in the talk.
        """
        if stranger_id == self.partner1_id:
            return self.partner2_id
        elif stranger_id == self.partner2_id:
            return self.partner1_id
        else:
            LOGGER.error('Stranger %s isn\'t a partner in the talk %d', stranger_id, self.id)
            raise WrongStrangerError()

    def get_sent(self, stranger):
        """Messages *stranger* has sent in this talk; raises WrongStrangerError."""
        if stranger == self.partner1:
            return self.partner1_sent
        elif stranger == self.partner2:
            return self.partner2_sent
        else:
            raise WrongStrangerError()

    def increment_sent(self, stranger):
        """Bump *stranger*'s sent counter and persist the row."""
        if stranger == self.partner1:
            self.partner1_sent += 1
        elif stranger == self.partner2:
            self.partner2_sent += 1
        else:
            raise WrongStrangerError()
        self.save()

    def is_successful(self):
        """Truthy when both partners sent at least one message."""
        return self.partner1_sent and self.partner2_sent
Exemplo n.º 9
0
Arquivo: models.py Projeto: ry-v1/lms
class SharedSolutionEntry(BaseModel):
    """A single view of a shared solution (who, when, and from where)."""
    referrer = TextField(null=True)  # HTTP referrer, if any
    # BUG FIX: the default must be the callable `datetime.now`, not
    # `datetime.now()` — the original evaluated once at import time, so
    # every row got the same timestamp.
    time = DateTimeField(default=datetime.now)
    user = ForeignKeyField(User, backref='entries')
    shared_solution = ForeignKeyField(SharedSolution, backref='entries')
Exemplo n.º 10
0
class Account(flaskDb.Model):
    """A pooled game account plus its health/inventory metadata.

    ``system_id`` records which system currently holds the account; a NULL
    ``system_id`` means the account is free to be handed out.
    """
    auth_service = Utf8mb4CharField(max_length=6, default='ptc')
    username = Utf8mb4CharField(primary_key=True)
    password = Utf8mb4CharField(null=True)
    email = Utf8mb4CharField(null=True)
    last_modified = DateTimeField(index=True, default=datetime.now)
    system_id = Utf8mb4CharField(max_length=64, index=True,
                                 null=True)  # system which uses the account
    assigned_at = DateTimeField(index=True, null=True)
    latitude = DoubleField(null=True)
    longitude = DoubleField(null=True)
    # from player_stats
    level = SmallIntegerField(index=True, null=True)
    xp = IntegerField(null=True)
    encounters = IntegerField(null=True)
    balls_thrown = IntegerField(null=True)
    captures = IntegerField(null=True)
    spins = IntegerField(null=True)
    walked = DoubleField(null=True)
    # from get_inbox
    team = Utf8mb4CharField(max_length=16, null=True)
    coins = IntegerField(null=True)
    stardust = IntegerField(null=True)
    # account health
    warn = BooleanField(null=True)
    banned = BooleanField(index=True, null=True)
    ban_flag = BooleanField(null=True)
    tutorial_state = Utf8mb4CharField(
        null=True)  # a CSV-list of tutorial steps completed
    captcha = BooleanField(index=True, null=True)
    rareless_scans = IntegerField(index=True, null=True)
    shadowbanned = BooleanField(index=True, null=True)
    # inventory info
    balls = SmallIntegerField(null=True)
    total_items = SmallIntegerField(null=True)
    pokemon = SmallIntegerField(null=True)
    eggs = SmallIntegerField(null=True)
    incubators = SmallIntegerField(null=True)
    lures = SmallIntegerField(null=True)

    @staticmethod
    def get_accounts(system_id,
                     count=1,
                     min_level=1,
                     max_level=40,
                     reuse=False,
                     banned_or_new=False):
        """Assign up to *count* matching accounts to *system_id*.

        Returns a list of dicts with credentials and scan metadata.  With
        *banned_or_new* set, banned/shadowbanned/unknown accounts are
        selected instead of healthy ones and *reuse* is ignored.
        """
        # Only one client can request accounts at a time.
        # Bug fix: the original acquire()/release() pair leaked the lock if
        # any query raised; release in a finally so the pool never deadlocks.
        request_lock.acquire()
        try:
            # NOTE: peewee needs ``== True``/``== False`` comparisons here.
            if banned_or_new:
                main_condition = Account.banned.is_null(True) | (
                    Account.banned == True) | (Account.shadowbanned == True)
                reuse = False
            else:
                main_condition = (Account.banned == False) & (
                    Account.shadowbanned == False)

            queries = []
            if reuse:
                # Look for good accounts for same system_id.
                queries.append(
                    Account.select().where((Account.system_id == system_id)
                                           & main_condition))
            # Look for good accounts that are unused.
            queries.append(Account.select().where(
                Account.system_id.is_null(True) & main_condition))

            accounts = []
            for query in queries:
                if count <= 0:
                    break
                # Additional conditions.
                if min_level > 1:
                    query = query.where(Account.level >= min_level)
                if max_level < 40:
                    query = query.where(Account.level <= max_level)
                # TODO: Add filter for nearby location.

                # Limitations and order: hand out least recently used first.
                query = query.limit(count).order_by(
                    Account.last_modified.asc())
                for account in query:
                    accounts.append({
                        'auth_service': account.auth_service,
                        'username': account.username,
                        'password': account.password,
                        'latitude': account.latitude,
                        'longitude': account.longitude,
                        'rareless_scans': account.rareless_scans,
                        'shadowbanned': account.shadowbanned,
                        'last_modified': account.last_modified,
                        'lures': account.lures
                    })

                    old_system_id = account.system_id
                    account.system_id = system_id
                    account.last_modified = datetime.now()

                    if old_system_id != system_id:
                        account.assigned_at = datetime.now()
                        new_account_event(
                            account, "Got assigned to [{}]".format(system_id))
                        if webhook_queue:
                            webhook_queue.put((
                                'assign',
                                create_webhook_data(
                                    'assign', None, account,
                                    "Got assigned to [{}]".format(system_id))))

                    account.save()

                    count -= 1
        finally:
            request_lock.release()
        return accounts

    @staticmethod
    def get_LureAccounts(count=1, min_level=1, max_level=40):
        """Return up to *count* already-assigned accounts that still hold lures."""
        # Only one client can request accounts at a time; release in a
        # finally for the same reason as in get_accounts.
        request_lock.acquire()
        try:
            queries = [
                Account.select().where((Account.system_id.is_null(False))
                                       & (Account.lures > 0))
            ]

            accounts = []
            for query in queries:
                if count <= 0:
                    break
                # Additional conditions.
                if min_level > 1:
                    query = query.where(Account.level >= min_level)
                if max_level < 40:
                    query = query.where(Account.level <= max_level)
                # TODO: Add filter for nearby location.
                query = query.where(Account.lures > 0)
                # Limitations and order.
                query = query.limit(count)

                for account in query:
                    accounts.append({
                        'auth_service': account.auth_service,
                        'username': account.username,
                        'password': account.password,
                        'latitude': account.latitude,
                        'longitude': account.longitude,
                        'rareless_scans': account.rareless_scans,
                        'shadowbanned': account.shadowbanned,
                        'last_modified': account.last_modified
                    })
                    count -= 1
        finally:
            request_lock.release()
        return accounts
Exemplo n.º 11
0
class BaseModel(Model):
    """Abstract base binding all subclasses to the shared peewee database."""

    # Row creation time; the callable is evaluated per-insert.
    add_time = DateTimeField(default=datetime.now, verbose_name="添加时间")

    class Meta:
        # Every subclass shares this database connection.
        database = database
Exemplo n.º 12
0
class Event(flaskDb.Model):
    """Audit-log row describing something that happened to an entity."""

    # Per-row timestamp (callable default, evaluated at insert time).
    timestamp = DateTimeField(default=datetime.now, index=True)
    entity_type = Utf8mb4CharField(max_length=16)  # kind of entity (<=16 chars)
    entity_id = Utf8mb4CharField(index=True)       # id within that kind
    description = Utf8mb4CharField()
Exemplo n.º 13
0
class PrintJobModel(BaseModel):
    """One print job: file metadata, timing, result, notes, and transient
    caches for associated filament/temperature rows."""

    userName = CharField(null=True)
    fileOrigin = CharField(null=True)  # new since db-scheme2
    fileName = TextField(null=True)
    filePathName = TextField(null=True)
    fileSize = IntegerField(null=True)
    printStartDateTime = DateTimeField(null=True)
    printEndDateTime = DateTimeField(null=True)
    duration = IntegerField(null=True)
    printStatusResult = CharField(null=True)
    noteText = TextField(null=True)
    noteDeltaFormat = TextField(null=True)
    noteHtml = TextField(null=True)
    printedLayers = CharField(null=True)
    printedHeight = CharField(null=True)
    slicerSettingsAsText = TextField(null=True)

    # Transient (non-persisted) caches standing in for relation models;
    # the DB associations live on the ``filaments``/``temperatures`` backrefs.
    allFilaments = None
    allTemperatures = None

    def addFilamentModel(self, filamentModel):
        """Append *filamentModel* to the in-memory filament cache."""
        if self.allFilaments is None:  # idiom fix: identity test for None
            self.allFilaments = []
        self.allFilaments.append(filamentModel)

    def getFilamentModels(self):
        """Return the cached filament models (None if never populated)."""
        return self.allFilaments

    def loadFilamentFromAssoziation(self):
        """Load the first associated filament into the cache and return it.

        The current UI handles only one spool even though the schema
        supports several; returns None when there is no association.
        """
        result = None
        for filament in self.filaments:
            result = filament
            self.addFilamentModel(result)
            break  # first spool only (see docstring)
        return result

    def addTemperatureModel(self, temperatureModel):
        """Append *temperatureModel* to the in-memory temperature cache."""
        if self.allTemperatures is None:
            self.allTemperatures = []
        self.allTemperatures.append(temperatureModel)

    def getTemperatureModels(self):
        """Return the cached temperature models, initialising to [] if unset."""
        if self.allTemperatures is None:
            self.allTemperatures = []
        return self.allTemperatures

    def getTemperaturesFromAssoziation(self):
        """Return all associated temperature rows as a list."""
        return list(self.temperatures)
Exemplo n.º 14
0
class Task(BaseModel):
    """A user-authored task/message."""

    user = ForeignKeyField(User, backref='task')
    message = TextField()
    # Callable default: evaluated per-row at insert time.
    created_date = DateTimeField(default=datetime.datetime.now)
    is_published = BooleanField(default=True)
Exemplo n.º 15
0
class MapData(BaseModel):
    """Per-beatmap attributes, precomputed for each mod combination.

    Field-name suffixes identify the mod combo (nomod, dt, hrdt, ezdt, ht,
    hrht, ezht, hr, ez); cs/od/ar/hp are difficulty settings and ``star_*``
    the star rating under those mods.
    """
    beatmap_id = IntegerField(primary_key=True)
    bpm = DoubleField(index=True)
    max_combo = DoubleField(index=True)
    length = DoubleField(index=True)
    submit_date = DateTimeField(index=True)
    creation_date = DateTimeField(index=True)

    cs_nomod = DoubleField(index=True)
    od_nomod = DoubleField(index=True)
    ar_nomod = DoubleField(index=True)
    hp_nomod = DoubleField(index=True)
    star_nomod = DoubleField(index=True)

    cs_dt = DoubleField(index=True)
    od_dt = DoubleField(index=True)
    ar_dt = DoubleField(index=True)
    hp_dt = DoubleField(index=True)
    star_dt = DoubleField(index=True)

    cs_hrdt = DoubleField(index=True)
    od_hrdt = DoubleField(index=True)
    ar_hrdt = DoubleField(index=True)
    hp_hrdt = DoubleField(index=True)
    star_hrdt = DoubleField(index=True)

    cs_ezdt = DoubleField()
    od_ezdt = DoubleField()
    ar_ezdt = DoubleField()
    hp_ezdt = DoubleField()
    star_ezdt = DoubleField()

    cs_ht = DoubleField(index=True)
    od_ht = DoubleField(index=True)
    ar_ht = DoubleField(index=True)
    hp_ht = DoubleField(index=True)
    star_ht = DoubleField(index=True)

    cs_hrht = DoubleField(index=True)
    od_hrht = DoubleField(index=True)
    ar_hrht = DoubleField(index=True)
    hp_hrht = DoubleField(index=True)
    star_hrht = DoubleField(index=True)

    cs_ezht = DoubleField()
    od_ezht = DoubleField()
    ar_ezht = DoubleField()
    hp_ezht = DoubleField()
    star_ezht = DoubleField()

    cs_hr = DoubleField(index=True)
    od_hr = DoubleField(index=True)
    ar_hr = DoubleField(index=True)
    hp_hr = DoubleField(index=True)
    star_hr = DoubleField(index=True)

    cs_ez = DoubleField()
    od_ez = DoubleField()
    ar_ez = DoubleField()
    hp_ez = DoubleField()
    star_ez = DoubleField()

    class Meta:
        # Both spellings set — presumably table_name for peewee 3.x and
        # db_table for legacy 2.x; confirm which peewee version is in use.
        table_name = 'maps_data'
        db_table = 'maps_data'
Exemplo n.º 16
0
def add_plant_fertilized_at(migrator):
    """Migration: add ``plant.fertilized_at`` defaulting to four days ago."""
    default_dt = datetime.now() - timedelta(days=4)
    new_column = DateTimeField(default=default_dt)
    migrate.migrate(migrator.add_column("plant", "fertilized_at", new_column))
Exemplo n.º 17
0
Arquivo: models.py Projeto: vixus0/wit
class Case(BaseModel):
    """A named case with open/close timestamps."""

    opened = DateTimeField(default=datetime.now)  # set per-row at creation
    closed = DateTimeField(null=True)  # NULL while the case is still open
    name = CharField(index=True)
Exemplo n.º 18
0
class LongPosition(BaseModel):
    """A long position on a market; open while ``sell_timestamp`` is NULL.

    Timestamps are handled as epoch seconds (see ``timestamp_str``).
    """
    exchange = CharField()
    market = CharField()
    buy_order_id = IntegerField()
    buy_quantity = DecimalField()
    purchase_price = DecimalField()
    fees = DecimalField()
    timestamp = DateTimeField()
    watchlist = CharField()
    sell_order_id = IntegerField(null=True)
    sell_quantity = DecimalField(null=True)
    sell_price = DecimalField(null=True)
    sell_timestamp = DateTimeField(null=True)
    scalped_quantity = DecimalField(null=True)

    def __str__(self):
        return f"{self.id}: {self.market} {time.ctime(self.timestamp)}"

    def save(self, *args, **kwargs):
        """Persist the row, stamping ``last_updated`` first.

        NOTE(review): ``last_updated`` is not a declared field on this model,
        so peewee will not persist it — confirm the intended schema.
        """
        self.last_updated = datetime.datetime.now()
        super(LongPosition, self).save(*args, **kwargs)

    @staticmethod
    def get_last_position(market):
        """Return one position for *market*, or None if none exists."""
        p = LongPosition.select().where(LongPosition.market == market).limit(1)
        if p and len(p) > 0:
            return p[0]
        return None

    @staticmethod
    def get_last_positions(num, market=None):
        """Return the *num* most recent positions, optionally per market."""
        query = LongPosition.select()
        if market:
            query = query.where(LongPosition.market == market)
        return query.order_by(LongPosition.timestamp.desc()).limit(num)

    @staticmethod
    def get_num_positions(market=None, limit=None):
        """Count positions, optionally restricted to *market* and *limit*."""
        query = LongPosition.select()
        if market:
            query = query.where(LongPosition.market == market)
        return query.order_by(
            LongPosition.timestamp.desc()).limit(limit).count()

    @staticmethod
    def get_results(since=timedelta(days=1)):
        """Aggregate profit/spend/fees over positions closed within *since*.

        NOTE(review): relies on ``profit``, ``status``, ``date_closed`` and
        the ``STATUS__*`` constants, none of which are declared on this model
        here — confirm they exist in the full codebase.
        """
        cutoff = datetime.datetime.now() - since
        d = time.mktime(cutoff.timetuple())
        result = LongPosition.select(
            fn.SUM(LongPosition.profit).alias('total_profit'),
            fn.SUM(LongPosition.buy_quantity *
                   LongPosition.purchase_price).alias('total_spent'),
            fn.SUM(LongPosition.fees).alias('total_fees'),
            fn.COUNT(LongPosition.id).alias('num_positions')).where(
                # position is closed...
                (LongPosition.status << [
                    LongPosition.STATUS__CLOSED_RIDE_PROFIT,
                    LongPosition.STATUS__CLOSED_LOSS
                ]) &
                # ...within the last...
                (LongPosition.date_closed >= d))

        return {
            "profit":
            result[0].total_profit,
            "spent":
            result[0].total_spent,
            "profit_percentage":
            result[0].total_profit / result[0].total_spent
            if result[0].total_profit else Decimal('0.0'),
            "num_trades":
            result[0].num_positions,
            "fees":
            result[0].total_fees,
        }

    @staticmethod
    def get_positions_since(since=timedelta(days=1)):
        """Return positions closed within the *since* window."""
        cutoff = datetime.datetime.now() - since
        d = time.mktime(cutoff.timetuple())
        return LongPosition.select().where(LongPosition.date_closed >= d)

    @staticmethod
    def get_open_positions(market=None):
        """Return positions not yet sold, optionally for one market."""
        if market:
            return LongPosition.select().where(
                LongPosition.market == market,
                LongPosition.sell_timestamp.is_null(True))
        return LongPosition.select().where(
            LongPosition.sell_timestamp.is_null(True))

    @property
    def timestamp_str(self):
        """Buy timestamp rendered as local ``YYYY-MM-DD HH:MM:SS``."""
        return datetime.datetime.fromtimestamp(
            self.timestamp).strftime('%Y-%m-%d %H:%M:%S')

    @property
    def spent(self):
        """Total purchase cost, excluding fees.

        Bug fix: the original getter declared a dead ``exclude_fees=True``
        parameter — property getters can never receive arguments, so the
        flag was misleading and has been removed.
        """
        return self.buy_quantity * self.purchase_price

    def calculate_scalp_sell_price(self, market_params, target_price):
        """Compute a ``(sell_quantity, target_price)`` pair that recoups cost.

        Rounds up so the sale covers the initial outlay, bumps the price to
        satisfy the exchange's MIN_NOTIONAL, and targets a larger price jump
        when the lot step size is too coarse to leave a scalp remainder.
        """
        # Must ROUND_UP to make sure we cover our initial investment.
        sell_quantity = (self.spent / target_price).quantize(
            market_params.lot_step_size, rounding=ROUND_UP)

        if sell_quantity * target_price < market_params.min_notional:
            # Can't execute a sell order worth less than MIN_NOTIONAL.
            # Adjust price up but hold quantity.
            target_price = (market_params.min_notional /
                            sell_quantity).quantize(
                                market_params.price_tick_size,
                                rounding=ROUND_UP)

        if sell_quantity >= self.buy_quantity:
            # The lot_step_size is large (e.g. LTC's 0.01) so there's no way
            # to take a profit slice this small.  Target a bigger price jump;
            # the resulting scalp quantity equals the lot_step_size minimum.
            sell_quantity = (self.buy_quantity -
                             market_params.lot_step_size).quantize(
                                 market_params.lot_step_size)
            target_price = (self.spent / sell_quantity).quantize(
                market_params.price_tick_size, rounding=ROUND_UP)

        return (sell_quantity, target_price)
Exemplo n.º 19
0
class BaseModel(Model):
    """Abstract base binding all subclasses to the shared peewee database."""

    # Row creation time; the callable is evaluated per-insert.
    created = DateTimeField(default=datetime.datetime.now)

    class Meta:
        # Every subclass shares this database connection.
        database = database
Exemplo n.º 20
0
class Candle(BaseModel):
    """OHLC candle for a market at a given interval.

    Uniqueness is enforced on (market, interval, timestamp); timestamps are
    treated as epoch seconds by ``timestamp_utc``/``num_periods_from_now``.
    """
    # Interval enum values persisted in the ``interval`` column.
    INTERVAL__1MINUTE = 1
    INTERVAL__5MINUTE = 2
    INTERVAL__15MINUTE = 3
    INTERVAL__1HOUR = 4
    INTERVAL__4HOUR = 5
    INTERVAL__1DAY = 6
    _intervals = [(INTERVAL__1MINUTE, "1 minute"),
                  (INTERVAL__5MINUTE, "5 minutes"),
                  (INTERVAL__15MINUTE, "15 minutes"),
                  (INTERVAL__1HOUR, "1 hour"), (INTERVAL__4HOUR, "4 hours"),
                  (INTERVAL__1DAY, "1 day")]

    # Unique together CompositeKey fields
    market = CharField()  # e.g. EOSBTC
    interval = SmallIntegerField(choices=_intervals)
    timestamp = DateTimeField()

    open = DecimalField()
    high = DecimalField()
    low = DecimalField()
    close = DecimalField()

    # metric fields
    rsi_1min = DecimalField(null=True)

    class Meta:
        # Enforce 'unique together' constraint
        primary_key = CompositeKey('market', 'interval', 'timestamp')

    def __str__(self):
        return f"{self.market} {self.interval} {self.timestamp}"

    @property
    def timestamp_utc(self):
        # NOTE(review): time.ctime renders *local* time despite the property
        # name — confirm whether UTC was intended.
        return time.ctime(self.timestamp)

    @staticmethod
    def get_last_candles(market, interval, n):
        """Return the latest *n* candles for market/interval, or None."""
        c = Candle.select().where(Candle.market == market,
                                  Candle.interval == interval).order_by(
                                      Candle.timestamp.desc()).limit(n)
        if not c or len(c) == 0:
            return None

        return c

    @staticmethod
    def get_last_candle(market, interval):
        """Return the most recent candle for market/interval, or None."""
        c = Candle.get_last_candles(market, interval, 1)
        if not c:
            return None

        return c[0]

    @staticmethod
    def batch_create_candles(market, interval, candle_data):
        """Insert one row per dict in *candle_data* (timestamp + OHLC keys)."""
        for d in candle_data:
            Candle.create(
                market=market,
                interval=interval,
                timestamp=d['timestamp'],
                open=d['open'],
                high=d['high'],
                low=d['low'],
                close=d['close'],
            )

    @staticmethod
    def get_historical_candles(market, interval, historical_timestamp, n):
        """Return up to *n* candles at/before *historical_timestamp*, or None."""
        c = Candle.select().where(
            Candle.market == market, Candle.interval == interval,
            Candle.timestamp <= historical_timestamp).order_by(
                Candle.timestamp.desc()).limit(n)
        if not c or len(c) == 0:
            return None

        return c

    @staticmethod
    def get_historical_candle(market, interval, historical_timestamp):
        """Return the candle at exactly *historical_timestamp*, or None."""
        c = Candle.select().where(Candle.market == market,
                                  Candle.interval == interval,
                                  Candle.timestamp == historical_timestamp)
        if not c or len(c) == 0:
            return None

        return c[0]

    def num_periods_from_now(self):
        """Whole intervals elapsed between this candle's timestamp and now.

        Raises:
            Exception: for intervals other than 1m/5m/1h (not implemented).
        """
        if config.interval == Candle.INTERVAL__1MINUTE:
            timestamp_multiplier = 60
        elif config.interval == Candle.INTERVAL__5MINUTE:
            timestamp_multiplier = 300
        elif config.interval == Candle.INTERVAL__1HOUR:
            timestamp_multiplier = 3600
        else:
            raise Exception("Didn't implement other intervals!")

        cur_timestamp = time.mktime(datetime.datetime.now().timetuple())

        # They are already in seconds, subtract and then divide by timestamp_multiplier
        return int(
            abs(int(cur_timestamp - self.timestamp)) / timestamp_multiplier)

    def calculate_moving_average(self, periods):
        """Simple moving average of ``close`` over the last *periods* candles.

        Assumes continuous data exists for the full *periods* range; a
        window-function version was sketched upstream but never enabled.
        """
        ma = Decimal('0.0')
        candles = Candle.select().where(
            Candle.market == self.market, Candle.interval == self.interval,
            Candle.timestamp <= self.timestamp).limit(periods).order_by(
                Candle.timestamp.desc())
        for candle in candles:
            ma += candle.close
        return ma / Decimal(periods)
Exemplo n.º 21
0
    class DbBarData(ModelBase):
        """
        Candlestick bar data for database storage.

        Index is defined unique with datetime, interval, symbol
        """

        id = AutoField()
        symbol: str = CharField()
        exchange: str = CharField()
        datetime: datetime = DateTimeField()
        interval: str = CharField()

        volume: float = FloatField()
        open_price: float = FloatField()
        high_price: float = FloatField()
        low_price: float = FloatField()
        close_price: float = FloatField()

        class Meta:
            database = db
            # Unique composite index backing the upsert in save_all().
            indexes = ((("datetime", "interval", "symbol", "exchange"),
                        True), )

        @staticmethod
        def from_bar(bar: BarData):
            """
            Generate DbBarData object from BarData.
            """
            db_bar = DbBarData()

            # Enum members are flattened to their string values for storage.
            db_bar.symbol = bar.symbol
            db_bar.exchange = bar.exchange.value
            db_bar.datetime = bar.datetime
            db_bar.interval = bar.interval.value
            db_bar.volume = bar.volume
            db_bar.open_price = bar.open_price
            db_bar.high_price = bar.high_price
            db_bar.low_price = bar.low_price
            db_bar.close_price = bar.close_price

            return db_bar

        def to_bar(self):
            """
            Generate BarData object from DbBarData.
            """
            bar = BarData(
                symbol=self.symbol,
                exchange=Exchange(self.exchange),
                datetime=self.datetime,
                interval=Interval(self.interval),
                volume=self.volume,
                open_price=self.open_price,
                high_price=self.high_price,
                low_price=self.low_price,
                close_price=self.close_price,
                gateway_name="DB",
            )
            return bar

        @staticmethod
        def save_all(objs: List["DbBarData"]):
            """
            save a list of objects, update if exists.
            """
            dicts = [i.to_dict() for i in objs]
            with db.atomic():
                if driver is Driver.POSTGRESQL:
                    # Postgres path: row-by-row upsert keyed on the unique index.
                    for bar in dicts:
                        DbBarData.insert(bar).on_conflict(
                            update=bar,
                            conflict_target=(
                                DbBarData.datetime,
                                DbBarData.interval,
                                DbBarData.symbol,
                                DbBarData.exchange,
                            ),
                        ).execute()
                else:
                    # Other backends: bulk replace-on-conflict, chunked —
                    # presumably to stay under SQLite's bound-variable limit.
                    for c in chunked(dicts, 50):
                        DbBarData.insert_many(
                            c).on_conflict_replace().execute()
Exemplo n.º 22
0
class Price(BaseModel):
    """A single price observation for an instrument at a point in time."""

    instrument = ForeignKeyField(Instrument, related_name="prices", null=False)
    price = FloatField()
    # NOTE: field name shadows the ``datetime`` module inside this class body.
    datetime = DateTimeField(null=False)
Exemplo n.º 23
0
    class DbFactorGroupData(ModelBase):
        """
        Factor grouping data for database storage.

        Rows are unique on (stock_id, date, factor_name, holding_period).
        """

        id = AutoField()
        stock_id: str = CharField()
        date: datetime = DateTimeField()

        industry: str = CharField()
        group: int = IntegerField()

        stock_return: float = FloatField()
        factor_name: str = CharField()
        holding_period: int = IntegerField()
        factor_name_chinese: str = CharField()
        factor_value: float = FloatField(null=True)
        factor_type: str = CharField()

        # Write timestamp, stamped in from_group().
        datetime_update: datetime = DateTimeField()

        class Meta:
            database = db
            indexes = ((("stock_id", "date", "factor_name", "holding_period"), True),)

        @staticmethod
        def from_group(group: GroupData):
            """Build a DbFactorGroupData row from a GroupData object."""
            db_bar = DbFactorGroupData()

            db_bar.stock_id = group.stock_id
            db_bar.date = group.date

            db_bar.industry = group.industry
            db_bar.group = group.group

            db_bar.stock_return = group.stock_return
            db_bar.holding_period = group.holding_period
            db_bar.factor_name = group.factor_name
            db_bar.factor_value = group.factor_value
            db_bar.factor_name_chinese = group.factor_name_chinese
            db_bar.factor_type = group.factor_type

            db_bar.datetime_update = datetime.now()

            return db_bar

        def to_bar(self):
            """
            Generate GroupData object from DbGroupData.

            NOTE(review): returns an *empty* GroupData — field copying was
            never implemented; confirm callers do not expect populated fields.
            """
            group = GroupData()
            return group

        @staticmethod
        def save_all(objs: List["DbFactorGroupData"]):
            """
            save a list of objects, update if exists.

            NOTE(review): the Postgres conflict_target (stock_id, date) does
            not match the declared unique index (stock_id, date, factor_name,
            holding_period); Postgres requires a matching unique constraint —
            confirm against the live schema before relying on this path.
            """
            dicts = [i.to_dict() for i in objs]
            with db.atomic():
                if driver is Driver.POSTGRESQL:
                    for bar in dicts:
                        DbFactorGroupData.insert(bar).on_conflict(
                            update=bar,
                            conflict_target=(
                                DbFactorGroupData.stock_id,
                                DbFactorGroupData.date,
                            ),
                        ).execute()
                else:
                    for c in chunked(dicts, 5000):
                        DbFactorGroupData.insert_many(c).on_conflict_replace().execute()
Exemplo n.º 24
0
class AhvImagesCache(CacheTableBase):
    """Local cache of AHV disk images, keyed on (name, uuid)."""

    __cache_type__ = "ahv_disk_image"
    name = CharField()
    image_type = CharField()
    uuid = CharField()
    # Bug fix: pass the callable, not ``datetime.datetime.now()`` — the
    # original evaluated it once at import time, stamping every row with the
    # process start time instead of the actual write time.
    last_update_time = DateTimeField(default=datetime.datetime.now)

    def get_detail_dict(self, *args, **kwargs):
        """Return this cache row as a plain dict."""
        return {
            "name": self.name,
            "uuid": self.uuid,
            "image_type": self.image_type,
            "last_update_time": self.last_update_time,
        }

    @classmethod
    def clear(cls, *args, **kwargs):
        """Remove every row from this cache table."""
        for db_entity in cls.select():
            db_entity.delete_instance()

    @classmethod
    def create_entry(cls, name, uuid, **kwargs):
        """Insert one image row; ``image_type`` may be passed via kwargs."""
        image_type = kwargs.get("image_type", "")
        # Store data in table.
        super().create(name=name, uuid=uuid, image_type=image_type)

    @classmethod
    def get_entity_data(cls, name, **kwargs):
        """Return the cached dict for *name*/*image_type*, or None.

        Raises:
            ValueError: when ``image_type`` is not supplied.
        """
        image_type = kwargs.get("image_type", None)
        if not image_type:
            raise ValueError(
                "image_type not provided for image {}".format(name))

        try:
            entity = super().get(cls.name == name,
                                 cls.image_type == image_type)
            return entity.get_detail_dict()

        except DoesNotExist:
            return None

    @classmethod
    def sync(cls, *args, **kwargs):
        """Sync the table data from the server (full clear + reload)."""
        # Clear old data first.
        cls.clear()

        # Update with the latest data.
        config = get_config()
        client = get_api_client()

        project_name = config["PROJECT"]["name"]
        params = {"length": 1000, "filter": "name=={}".format(project_name)}
        project_name_uuid_map = client.project.get_name_uuid_map(params)

        if not project_name_uuid_map:
            LOG.error("Invalid project {} in config".format(project_name))
            sys.exit(-1)

        project_id = project_name_uuid_map[project_name]
        res, err = client.project.read(project_id)
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))

        project = res.json()
        accounts = project["status"]["project_status"]["resources"][
            "account_reference_list"]

        reg_accounts = []
        for account in accounts:
            reg_accounts.append(account["uuid"])

        # As account_uuid is required for versions>2.9.0
        account_uuid = ""
        payload = {"length": 250, "filter": "type==nutanix_pc"}
        res, err = client.account.list(payload)
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))

        res = res.json()
        for entity in res["entities"]:
            entity_id = entity["metadata"]["uuid"]
            if entity_id in reg_accounts:
                account_uuid = entity_id
                break

        AhvVmProvider = get_provider("AHV_VM")
        AhvObj = AhvVmProvider.get_api_obj()
        res = AhvObj.images(account_uuid=account_uuid)

        for entity in res["entities"]:
            name = entity["status"]["name"]
            uuid = entity["metadata"]["uuid"]
            # TODO add proper validation for karbon images
            image_type = entity["status"]["resources"].get("image_type", "")
            cls.create_entry(name=name, uuid=uuid, image_type=image_type)

    @classmethod
    def show_data(cls, *args, **kwargs):
        """Display the stored rows as a table on stdout."""
        if not len(cls.select()):
            click.echo(highlight_text("No entry found !!!"))
            return

        table = PrettyTable()
        table.field_names = ["NAME", "UUID", "IMAGE_TYPE", "LAST UPDATED"]
        for entity in cls.select().order_by(cls.image_type):
            entity_data = entity.get_detail_dict()
            last_update_time = arrow.get(
                entity_data["last_update_time"].astimezone(
                    datetime.timezone.utc)).humanize()
            table.add_row([
                highlight_text(entity_data["name"]),
                highlight_text(entity_data["uuid"]),
                highlight_text(entity_data["image_type"]),
                highlight_text(last_update_time),
            ])
        click.echo(table)

    class Meta:
        database = dsl_database
        primary_key = CompositeKey("name", "uuid")
Exemplo n.º 25
0
    class DBFactorRetData(ModelBase):
        """
        Factor return data for database storage.

        Rows are unique on (date, factor_name, ret_type, holding_period).
        """

        id = AutoField()

        date: datetime = DateTimeField()

        factor_return: float = FloatField()
        holding_period: int = IntegerField()
        factor_T: float = FloatField(null=True)
        factor_name: str = CharField()
        factor_name_chinese: str = CharField()
        ret_type: str = CharField()

        # Write timestamp, stamped in from_ret().
        datetime_update: datetime = DateTimeField()

        class Meta:
            database = db
            indexes = ((("date", "factor_name", "ret_type", "holding_period"), True),)

        @staticmethod
        def from_ret(ret: FactorRetData):
            """Build a DBFactorRetData row from a FactorRetData object."""
            db_ret = DBFactorRetData()

            db_ret.date = ret.date

            db_ret.factor_return = ret.factor_return
            db_ret.factor_T = ret.factor_T
            db_ret.holding_period = ret.holding_period
            db_ret.factor_name = ret.factor_name
            db_ret.factor_name_chinese = ret.factor_name_chinese
            db_ret.ret_type = ret.ret_type
            db_ret.datetime_update = datetime.now()

            return db_ret

        def to_bar(self):
            """
            Generate FactorRetData object from DBFactorRetData.

            NOTE(review): returns an *empty* FactorRetData — field copying
            was never implemented; kept as-is for compatibility.
            """
            Ret = FactorRetData()
            return Ret

        @staticmethod
        def save_all(objs: List["DBFactorRetData"]):
            """
            Save a list of objects, updating rows that already exist.
            """
            dicts = [i.to_dict() for i in objs]
            with db.atomic():
                if driver is Driver.POSTGRESQL:
                    for row in dicts:
                        # Bug fix: the original conflict_target referenced
                        # DBFactorRetData.stock_id, a field this model does
                        # not declare (AttributeError at runtime).  Use the
                        # declared unique index instead.
                        DBFactorRetData.insert(row).on_conflict(
                            update=row,
                            conflict_target=(
                                DBFactorRetData.date,
                                DBFactorRetData.factor_name,
                                DBFactorRetData.ret_type,
                                DBFactorRetData.holding_period,
                            ),
                        ).execute()
                else:
                    for c in chunked(dicts, 1000):
                        DBFactorRetData.insert_many(c).on_conflict_replace().execute()

        def query_data(self,
                       factor_names: tuple,
                       ret_type: str = 'Pearson',
                       hp: int = 1,
                       sta_date: str = '2013-01-01',
                       end_date: str = '2020-04-01'):
            """Read factor returns between two dates as a DataFrame, or None.

            SECURITY NOTE(review): the SQL is assembled with f-strings; do
            not pass untrusted values (parameterize if callers ever do).
            """
            factor_sql = f"SELECT DATE_FORMAT(`date`,'%Y-%m-%d') as `date`,  factor_return, factor_name " \
                         f"FROM dbfactorretdata " \
                         f"WHERE factor_name IN {factor_names} " \
                         f"AND ret_type = '{ret_type}' " \
                         f"AND holding_period = '{hp}' " \
                         f"AND `date` BETWEEN str_to_date('{sta_date}', '%Y-%m-%d') " \
                         f"AND str_to_date('{end_date}', '%Y-%m-%d') "
            res = pd.read_sql(factor_sql, con=MySQL_con)
            return None if res.empty else res
Exemplo n.º 26
0
class AhvNetworkFunctionChain(CacheTableBase):
    """Local cache of AHV network function chains, keyed on (name, uuid).

    Rows are refreshed from the server via `sync()` and looked up by
    name through `get_entity_data()`.
    """

    __cache_type__ = "ahv_network_function_chain"
    name = CharField()
    uuid = CharField()
    # Pass the callable itself, not its result: `datetime.datetime.now()`
    # would be evaluated once at import time and stamp every row with the
    # same frozen timestamp.
    last_update_time = DateTimeField(default=datetime.datetime.now)

    def get_detail_dict(self, *args, **kwargs):
        """Return this row's data as a plain dict."""
        return {
            "name": self.name,
            "uuid": self.uuid,
            "last_update_time": self.last_update_time,
        }

    @classmethod
    def clear(cls, *args, **kwargs):
        """removes entire data from table"""
        for db_entity in cls.select():
            db_entity.delete_instance()

    @classmethod
    def create_entry(cls, name, uuid, **kwargs):
        """Insert one (name, uuid) row; last_update_time comes from the field default."""
        super().create(
            name=name,
            uuid=uuid,
        )

    @classmethod
    def get_entity_data(cls, name, **kwargs):
        """Return the detail dict for `name`, or None when no row matches."""
        try:
            entity = super().get(cls.name == name)
            return entity.get_detail_dict()

        except DoesNotExist:
            return None

    @classmethod
    def sync(cls, *args, **kwargs):
        """Replace the table contents with the server's current chain list."""
        # clear old data
        cls.clear()

        # update by latest data
        client = get_api_client()
        Obj = get_resource_api("network_function_chains", client.connection)
        res, err = Obj.list({"length": 1000})
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))

        res = res.json()
        for entity in res["entities"]:
            name = entity["status"]["name"]
            uuid = entity["metadata"]["uuid"]
            cls.create_entry(name=name, uuid=uuid)

    @classmethod
    def show_data(cls, *args, **kwargs):
        """display stored data in table"""

        if not len(cls.select()):
            click.echo(highlight_text("No entry found !!!"))
            return

        table = PrettyTable()
        table.field_names = ["NAME", "UUID", "LAST UPDATED"]
        for entity in cls.select():
            entity_data = entity.get_detail_dict()
            last_update_time = arrow.get(
                entity_data["last_update_time"].astimezone(
                    datetime.timezone.utc)).humanize()
            table.add_row([
                highlight_text(entity_data["name"]),
                highlight_text(entity_data["uuid"]),
                highlight_text(last_update_time),
            ])
        click.echo(table)

    class Meta:
        database = dsl_database
        primary_key = CompositeKey("name", "uuid")
Exemplo n.º 27
0
class _BaseModel(Model):
    """Common base model: binds to `db` and tracks created/updated timestamps."""

    class Meta:
        database = db

    updated_at = DateTimeField(default=None, null=True)
    # Pass the callable so each row gets its own creation time; calling
    # `utcnow()` here would evaluate once at import time and freeze a single
    # timestamp for every row ever created.
    created_at = DateTimeField(default=datetime.datetime.utcnow)
Exemplo n.º 28
0
class AhvSubnetsCache(CacheTableBase):
    """Local cache of AHV subnets, keyed on (name, uuid) with the owning cluster.

    Rows are refreshed from the server via `sync()` (scoped to the configured
    project) and looked up by name — optionally qualified by cluster — through
    `get_entity_data()`.
    """

    __cache_type__ = "ahv_subnet"
    name = CharField()
    uuid = CharField()
    cluster = CharField()
    # Pass the callable itself, not its result: `datetime.datetime.now()`
    # would be evaluated once at import time and stamp every row with the
    # same frozen timestamp.
    last_update_time = DateTimeField(default=datetime.datetime.now)

    def get_detail_dict(self, *args, **kwargs):
        """Return this row's data as a plain dict."""
        return {
            "name": self.name,
            "uuid": self.uuid,
            "cluster": self.cluster,
            "last_update_time": self.last_update_time,
        }

    @classmethod
    def clear(cls, *args, **kwargs):
        """removes entire data from table"""
        for db_entity in cls.select():
            db_entity.delete_instance()

    @classmethod
    def create_entry(cls, name, uuid, **kwargs):
        """Insert one subnet row; `cluster` is required via kwargs.

        Raises ValueError when the cluster name is missing/empty.
        """
        cluster_name = kwargs.get("cluster", None)
        if not cluster_name:
            raise ValueError("cluster not supplied for subnet {}".format(name))

        # store data in table
        super().create(
            name=name,
            uuid=uuid,
            cluster=cluster_name,
        )

    @classmethod
    def get_entity_data(cls, name, **kwargs):
        """Return the detail dict for `name` (optionally per-cluster), or None."""
        cluster_name = kwargs.get("cluster", "")
        try:
            if cluster_name:
                entity = super().get(
                    cls.name == name,
                    cls.cluster == cluster_name,
                )
            else:
                # The get() method is shorthand for selecting with a limit of 1
                # If more than one row is found, the first row returned by the database cursor
                entity = super().get(cls.name == name)
            return entity.get_detail_dict()

        except DoesNotExist:
            return None

    @classmethod
    def show_data(cls, *args, **kwargs):
        """display stored data in table"""

        if not len(cls.select()):
            click.echo(highlight_text("No entry found !!!"))
            return

        table = PrettyTable()
        table.field_names = ["NAME", "UUID", "CLUSTER_NAME", "LAST UPDATED"]
        for entity in cls.select():
            entity_data = entity.get_detail_dict()
            last_update_time = arrow.get(
                entity_data["last_update_time"].astimezone(
                    datetime.timezone.utc)).humanize()
            table.add_row([
                highlight_text(entity_data["name"]),
                highlight_text(entity_data["uuid"]),
                highlight_text(entity_data["cluster"]),
                highlight_text(last_update_time),
            ])
        click.echo(table)

    @classmethod
    def sync(cls, *args, **kwargs):
        """sync the table data from server"""
        # clear old data
        cls.clear()

        # update by latest data
        config = get_config()
        client = get_api_client()

        project_name = config["PROJECT"]["name"]
        params = {"length": 1000, "filter": "name=={}".format(project_name)}
        project_name_uuid_map = client.project.get_name_uuid_map(params)

        if not project_name_uuid_map:
            LOG.error("Invalid project {} in config".format(project_name))
            sys.exit(-1)

        project_id = project_name_uuid_map[project_name]
        res, err = client.project.read(project_id)
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))

        project = res.json()
        subnets_list = []
        for subnet in project["status"]["project_status"]["resources"][
                "subnet_reference_list"]:
            subnets_list.append(subnet["uuid"])

        # Extending external subnet's list from remote account
        for subnet in project["status"]["project_status"]["resources"][
                "external_network_list"]:
            subnets_list.append(subnet["uuid"])

        accounts = project["status"]["project_status"]["resources"][
            "account_reference_list"]

        reg_accounts = []
        for account in accounts:
            reg_accounts.append(account["uuid"])

        # As account_uuid is required for versions>2.9.0
        account_uuid = ""
        payload = {"length": 250, "filter": "type==nutanix_pc"}
        res, err = client.account.list(payload)
        if err:
            raise Exception("[{}] - {}".format(err["code"], err["error"]))

        res = res.json()
        for entity in res["entities"]:
            entity_id = entity["metadata"]["uuid"]
            if entity_id in reg_accounts:
                account_uuid = entity_id
                break

        AhvVmProvider = get_provider("AHV_VM")
        AhvObj = AhvVmProvider.get_api_obj()

        # Single filter query matching any of the project's subnet uuids.
        filter_query = "(_entity_id_=={})".format(
            ",_entity_id_==".join(subnets_list), )
        res = AhvObj.subnets(account_uuid=account_uuid,
                             filter_query=filter_query)
        for entity in res["entities"]:
            name = entity["status"]["name"]
            uuid = entity["metadata"]["uuid"]
            cluster_ref = entity["status"]["cluster_reference"]
            cluster_name = cluster_ref.get("name", "")

            cls.create_entry(name=name, uuid=uuid, cluster=cluster_name)

    class Meta:
        database = dsl_database
        primary_key = CompositeKey("name", "uuid")
Exemplo n.º 29
0
class Solution(BaseModel):
    """A user's submission for an exercise, with checking state and grading."""

    STATES = SolutionState
    STATUS_VIEW = SolutionStatusView
    # Ceiling on how long a solution may sit in checking (seconds).
    MAX_CHECK_TIME_SECONDS = 60 * 10

    exercise = ForeignKeyField(Exercise, backref='solutions')
    solver = ForeignKeyField(User, backref='solutions')
    # Staff member who checked the solution; set via mark_as_checked().
    checker = ForeignKeyField(User, null=True, backref='solutions')
    state = CharField(
        choices=STATES.to_choices(),
        default=STATES.CREATED.name,
        index=True,
    )
    # Grade constrained at the DB level to the inclusive range [0, 100].
    grade = IntegerField(
        default=0,
        constraints=[Check('grade <= 100'),
                     Check('grade >= 0')],
    )
    submission_timestamp = DateTimeField(index=True)
    # Content hash of the submission, used for duplicate detection.
    hashed = TextField()
    last_status_view = CharField(
        choices=STATUS_VIEW.to_choices(),
        default=STATUS_VIEW.UPLOADED.name,
        index=True,
    )
    last_time_view = DateTimeField(default=datetime.now, null=True, index=True)
    assessment = ForeignKeyField(
        SolutionAssessment,
        backref='solutions',
        null=True,
    )

    @property
    def solution_files(
        self, ) -> Union[Iterable['SolutionFile'], 'SolutionFile']:
        """Return the SolutionFile rows belonging to this solution."""
        return SolutionFile.filter(SolutionFile.solution == self)

    @property
    def is_shared(self):
        # `shared` is presumably a backref from a sharing model defined
        # elsewhere in the file — TODO confirm against the full module.
        return bool(self.shared)

    @property
    def is_checked(self):
        """True once the solution reached the DONE state."""
        return self.state == self.STATES.DONE.name

    @staticmethod
    def create_hash(content: Union[str, bytes], *args, **kwargs) -> str:
        """Hash submission content via the project's hashing helper."""
        return hashing.by_content(content, *args, **kwargs)

    @classmethod
    def is_duplicate(
        cls,
        content: Union[str, bytes],
        user: User,
        exercise: Exercise,
        *,
        already_hashed: bool = False,
    ) -> bool:
        """Return True when `content` matches the user's latest submission.

        `content` may be raw (hashed here) or a precomputed hash when
        `already_hashed` is set.
        """

        hash_ = cls.create_hash(content) if not already_hashed else content

        # Compare only against the most recent submission for this
        # (exercise, user) pair.
        last_submission_hash = (cls.select(cls.hashed).where(
            cls.exercise == exercise,
            cls.solver == user,
        ).order_by(cls.submission_timestamp.desc()).limit(1).scalar())

        return last_submission_hash == hash_

    def view_solution(self) -> None:
        """Record that the solver viewed the solution, updating view status."""
        self.last_time_view = datetime.now()
        if (self.last_status_view != self.STATUS_VIEW.NOT_CHECKED.name
                and self.state == self.STATES.CREATED.name):
            self.last_status_view = self.STATUS_VIEW.NOT_CHECKED.name
        elif (self.last_status_view != self.STATUS_VIEW.CHECKED.name
              and self.state == self.STATES.DONE.name):
            self.last_status_view = self.STATUS_VIEW.CHECKED.name
        self.save()

    def start_checking(self) -> bool:
        """Move the solution into IN_CHECKING; True when the update applied."""
        return self.set_state(Solution.STATES.IN_CHECKING)

    def set_state(self, new_state: SolutionState, **kwargs) -> bool:
        """Atomically set this row's state (plus any extra column kwargs)."""
        # Optional: filter the old state of the object
        # to make sure that no two processes set the state together
        requested_solution = (Solution.id == self.id)
        updates_dict = {Solution.state.name: new_state.name}
        changes = Solution.update(
            **updates_dict,
            **kwargs,
        ).where(requested_solution)
        # Exactly one row should match the primary-key filter.
        return changes.execute() == 1

    def ordered_versions(self) -> Iterable['Solution']:
        """All of this solver's submissions for the exercise, oldest first."""
        return Solution.select().where(
            Solution.exercise == self.exercise,
            Solution.solver == self.solver,
        ).order_by(Solution.submission_timestamp.asc())

    def test_results(self) -> Iterable[dict]:
        """Automated test executions recorded for this solution."""
        return SolutionExerciseTestExecution.by_solution(self)

    @classmethod
    def of_user(
        cls,
        user_id: int,
        with_archived: bool = False,
        from_all_courses: bool = False,
    ) -> Iterable[Dict[str, Any]]:
        """Return the user's exercises as dicts, enriched with their
        latest solution's id, check status, comment count and assessment."""
        db_exercises = Exercise.get_objects(
            user_id=user_id,
            fetch_archived=with_archived,
            from_all_courses=from_all_courses,
        )
        exercises = Exercise.as_dicts(db_exercises)
        solutions = (cls.select(
            cls.exercise,
            cls.id,
            cls.state,
            cls.checker,
            cls.assessment,
        ).where(cls.exercise.in_(db_exercises),
                cls.solver == user_id).order_by(
                    cls.submission_timestamp.desc()))
        for solution in solutions:
            exercise = exercises[solution.exercise_id]
            # Solutions are ordered newest-first, so only the first one
            # seen per exercise (the latest) fills in the details.
            if exercise.get('solution_id') is None:
                exercise['solution_id'] = solution.id
                exercise['is_checked'] = solution.is_checked
                exercise['comments_num'] = len(solution.staff_comments)
                if solution.is_checked and solution.checker:
                    exercise['checker'] = solution.checker.fullname
                if solution.assessment:
                    exercise['assessment'] = solution.assessment.name
        return tuple(exercises.values())

    @property
    def comments(self):
        """All comments attached to any of this solution's files."""
        return Comment.select().join(
            SolutionFile, ).where(SolutionFile.solution == self)

    @property
    def ordered_comments(self):
        """Comments on this solution, newest first."""
        return self.comments.order_by(Comment.timestamp.desc())

    @property
    def staff_comments(self):
        """Comments written by staff or admin users only."""
        return self.comments.switch(Comment).join(User).join(Role).where(
            (Comment.commenter.role == Role.get_staff_role().id)
            | (Comment.commenter.role == Role.get_admin_role().id), )

    @property
    def comments_per_file(self):
        """Map file id -> number of staff comments on that file."""
        return Counter(c.file.id for c in self.staff_comments)

    @classmethod
    def create_solution(
        cls,
        exercise: Exercise,
        solver: User,
        files: List['File'],
        hash_: Optional[str] = None,
    ) -> 'Solution':
        """Create a solution with its files; reject exact duplicates and
        mark the solver's previous solutions for this exercise as old.

        Raises AlreadyExists when the content matches the last submission.
        """
        # For a single file the hash is recomputed from its code,
        # overriding any hash_ passed by the caller.
        if len(files) == 1:
            hash_ = cls.create_hash(files[0].code)

        if (hash_ and cls.is_duplicate(
                hash_, solver, exercise, already_hashed=True)):
            raise AlreadyExists('This solution already exists.')

        instance = cls.create(
            **{
                cls.exercise.name: exercise,
                cls.solver.name: solver,
                cls.submission_timestamp.name: datetime.now(),
                cls.hashed.name: hash_,
            })

        files_details = [{
            SolutionFile.path.name:
            f.path,
            SolutionFile.solution_id.name:
            instance.id,
            SolutionFile.code.name:
            f.code,
            SolutionFile.file_hash.name:
            SolutionFile.create_hash(f.code),
        } for f in files]
        SolutionFile.insert_many(files_details).execute()

        # update old solutions for this exercise
        other_solutions: Iterable[Solution] = cls.select().where(
            cls.exercise == exercise,
            cls.solver == solver,
            cls.id != instance.id,
        )
        for old_solution in other_solutions:
            old_solution.set_state(Solution.STATES.OLD_SOLUTION)
        return instance

    @classmethod
    def _base_next_unchecked(cls):
        """Base query for unchecked (CREATED) solutions, ordered so the
        least-commented / least-failing / oldest come first."""
        comments_count = fn.Count(Comment.id).alias('comments_count')
        fails = fn.Count(SolutionExerciseTestExecution.id).alias('failures')
        return cls.select(
            cls.id,
            cls.state,
            cls.exercise,
            comments_count,
            fails,
        ).join(
            SolutionFile,
            join_type=JOIN.LEFT_OUTER,
            on=(SolutionFile.solution == cls.id),
        ).join(
            Comment,
            join_type=JOIN.LEFT_OUTER,
            on=(Comment.file == SolutionFile.id),
        ).join(
            SolutionExerciseTestExecution,
            join_type=JOIN.LEFT_OUTER,
            on=(SolutionExerciseTestExecution.solution == cls.id),
        ).where(cls.state == Solution.STATES.CREATED.name, ).group_by(
            cls.id, ).order_by(
                comments_count,
                fails,
                cls.submission_timestamp.asc(),
            )

    def change_assessment(self, assessment_id: Optional[int] = None) -> bool:
        """Set (or clear, when the id matches nothing) this solution's
        assessment; True when exactly one row was updated."""
        assessment = SolutionAssessment.get_or_none(
            SolutionAssessment.id == assessment_id, )
        requested_solution = (Solution.id == self.id)
        updates_dict = {Solution.assessment.name: assessment}
        changes = Solution.update(**updates_dict).where(requested_solution)
        return changes.execute() == 1

    def mark_as_checked(
        self,
        by: Optional[Union[User, int]] = None,
    ) -> bool:
        """Move to DONE and record who checked it."""
        return self.set_state(Solution.STATES.DONE, checker=by)

    @classmethod
    def next_unchecked(cls) -> Optional['Solution']:
        """Next solution awaiting checking, or None when the queue is empty."""
        try:
            return cls._base_next_unchecked().get()
        except cls.DoesNotExist:
            return None

    @classmethod
    def next_unchecked_of(cls, exercise_id) -> Optional['Solution']:
        """Next unchecked solution for a specific exercise, or None."""
        try:
            return cls._base_next_unchecked().where(
                cls.exercise == exercise_id, ).get()
        except cls.DoesNotExist:
            return None

    @classmethod
    def status(cls, course_id: Optional[int] = None):
        """Per-exercise submitted/checked counts, optionally per course."""
        # CASE expression: 1 for DONE solutions, 0 otherwise, summed below.
        one_if_is_checked = Case(
            Solution.state,
            ((Solution.STATES.DONE.name, 1), ),
            0,
        )
        fields = (
            Exercise.id,
            Exercise.course,
            Exercise.subject.alias('name'),
            Exercise.is_archived.alias('is_archived'),
            fn.Count(Solution.id).alias('submitted'),
            fn.Sum(one_if_is_checked).alias('checked'),
        )
        active_solution_states = Solution.STATES.active_solutions()
        active_solutions = Solution.state.in_(active_solution_states)
        # When course_id is None the filter degenerates to True (all courses).
        right_course = (course_id is None) or course_id == Course.id

        return (Exercise.select(*fields).join(
            Course, on=(Course.id == Exercise.course)).switch().join(
                Solution,
                on=(Solution.exercise == Exercise.id
                    )).where(active_solutions & right_course).group_by(
                        Exercise.subject, Exercise.id).order_by(Exercise.id))

    @classmethod
    def left_in_exercise(cls, exercise: Exercise) -> int:
        """Percentage of this exercise's active solutions already checked."""
        one_if_is_checked = Case(Solution.state,
                                 ((Solution.STATES.DONE.name, 1), ), 0)
        active_solutions = cls.state.in_(Solution.STATES.active_solutions())
        response = cls.filter(
            cls.exercise == exercise,
            active_solutions,
        ).select(
            fn.Count(cls.id).alias('submitted'),
            fn.Sum(one_if_is_checked).alias('checked'),
        ).dicts().get()
        # NOTE(review): with no matching rows `submitted` is 0 and SQL SUM
        # yields NULL/None for `checked`, so this can raise — confirm callers
        # only invoke it for exercises with active solutions.
        return int(response['checked'] * 100 / response['submitted'])
Exemplo n.º 30
0
class Session(Model):
    """A hashcat cracking session.

    Persists the session configuration and status via peewee, and drives
    the hashcat subprocess from a worker thread: start/pause/resume/quit
    are signalled by writing single keys to hashcat's stdin (or, on
    Windows, by injecting console key events).
    """

    name = CharField(unique=True)
    crack_type = CharField()  # "dictionary" or "mask" (selects -a 0 / -a 3)
    hash_file = CharField()
    pot_file = CharField()
    hash_mode_id = IntegerField()  # hashcat -m hash mode
    rule_file = CharField(null=True)
    wordlist_file = CharField(null=True)
    mask_file = CharField(null=True)
    username_included = BooleanField()  # adds --username when set
    device_type = IntegerField()  # hashcat -D device type; falsy means unset
    end_timestamp = IntegerField(null=True)  # UTC epoch after which the run is quit
    output_file = CharField(null=True)
    session_status = CharField()  # e.g. "Running" / "Paused" / "Aborted" / "Error" / "Done"
    time_started = DateTimeField(null=True)
    progress = FloatField()
    reason = TextField()  # error/abort explanation; empty on success

    class Meta:
        database = database

    def setup(self):
        """Create temp output files and reset per-run runtime attributes."""
        # File to store the processes output
        random_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12))
        self.result_file = os.path.join(tempfile.gettempdir(), random_name+".cracked")

        # File to store the hashcat output
        random_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12))
        self.hashcat_output_file = os.path.join(tempfile.gettempdir(), random_name+".hashcat")
        open(self.hashcat_output_file,'a').close()

        # Runtime stats parsed later from hashcat's status output.
        self.hash_type = "N/A"
        self.time_estimated = "N/A"
        self.speed = "N/A"
        self.recovered = "N/A"

    def start(self):
        """Launch the cracking run in a background thread."""
        if os.name == 'nt':
            # The Windows console-injection control scheme only supports
            # one hashcat process at a time.
            if Hashcat.number_ongoing_sessions() > 0:
                raise Exception("Windows version of Hashcatnode only supports 1 running hashcat at a time")

        self.thread = threading.Thread(target=self.session_thread)
        self.thread.start()

        # Little delay to ensure the process if properly launched
        time.sleep(1)

        # TO UNCOMMENT
        self.status()

    def session_thread(self):
        """Worker thread: run hashcat, stream its output, update status."""
        # Prepare regex to parse the main hashcat process output
        regex_list = [
            ("hash_type", re.compile("^Hash\.Type\.+: +(.*)\s*$")),
            ("speed", re.compile("^Speed\.#1\.+: +(.*)\s*$")),
        ]
        if self.crack_type == "dictionary":
            regex_list.append(("progress", re.compile("^Progress\.+: +\d+/\d+ \((\S+)%\)\s*$")))
            regex_list.append(("time_estimated", re.compile("^Time\.Estimated\.+: +(.*)\s*$")))
        elif self.crack_type == "mask":
            regex_list.append(("progress", re.compile("^Input\.Mode\.+: +Mask\s+\(\S+\)\s+\[\d+\]\s+\((\S+)%\)\s*$")))

        # NOTE(review): overwritten below with datetime.utcnow() after the
        # command line is logged — confirm which value is intended to persist.
        self.time_started = str(datetime.now())

        if not self.session_status in ["Aborted"]:
            # Command lines used to crack the passwords
            if self.crack_type == "dictionary":
                if self.rule_file != None:
                    cmd_line = [Hashcat.binary, '--session', self.name, '--status', '-a', '0', '-m', str(self.hash_mode_id), self.hash_file, self.wordlist_file, '-r', self.rule_file]
                else:
                    cmd_line = [Hashcat.binary, '--session', self.name, '--status', '-a', '0', '-m', str(self.hash_mode_id), self.hash_file, self.wordlist_file]
            if self.crack_type == "mask":
                cmd_line = [Hashcat.binary, '--session', self.name, '--status', '-a', '3', '-m', str(self.hash_mode_id), self.hash_file, self.mask_file]
            if self.username_included:
                cmd_line += ["--username"]
            if self.device_type:
                cmd_line += ["-D", str(self.device_type)]
            # workload profile
            cmd_line += ["--workload-profile", Hashcat.workload_profile]
            # set pot file
            cmd_line += ["--potfile-path", self.pot_file]
        else:
            # resume previous session
            cmd_line = [Hashcat.binary, '--session', self.name, '--restore']

        print("Session:%s, startup command:%s" % (self.name, " ".join(cmd_line)))
        logging.debug("Session:%s, startup command:%s" % (self.name, " ".join(cmd_line)))
        with open(self.hashcat_output_file, "a") as f:
            f.write("Command: %s\n" % " ".join(cmd_line))

        self.session_status = "Running"
        self.time_started = datetime.utcnow()
        self.save()

        if os.name == 'nt':
            # To controlhashcat on Windows, very different implementation than on linux
            # Look at:
            # https://github.com/hashcat/hashcat/blob/9dffc69089d6c52e6f3f1a26440dbef140338191/src/terminal.c#L477
            free_console=True
            try:
                win32console.AllocConsole()
            except win32console.error as exc:
                if exc.winerror!=5:
                    raise
                ## only free console if one was created successfully
                free_console=False

            self.win_stdin = win32console.GetStdHandle(win32console.STD_INPUT_HANDLE)

        # cwd needs to be added for Windows version of hashcat
        if os.name == 'nt':
            self.session_process = subprocess.Popen(cmd_line, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=os.path.dirname(Hashcat.binary))
        else:
            self.session_process = subprocess.Popen(cmd_line, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, cwd=os.path.dirname(Hashcat.binary))

        self.update_session()

        # Stream hashcat's stdout: append raw bytes to the log file and
        # parse state transitions / stats out of each decoded line.
        for line in self.session_process.stdout:
            with open(self.hashcat_output_file, "ab") as f:
                f.write(line)

            line = line.decode()
            line = line.rstrip()

            if line == "Resumed":
                self.session_status = "Running"
                self.save()

            if line == "Paused":
                self.session_status = "Paused"
                self.save()

            # Apply each stat regex; on a match, store the captured group
            # on the attribute named by the regex entry.
            for var_regex in regex_list:
                var = var_regex[0]
                regex = var_regex[1]

                m = regex.match(line)
                if m:
                    setattr(self, var, m.group(1))

            # check timestamp
            if self.end_timestamp:
                current_timestamp = int(datetime.utcnow().timestamp())

                if current_timestamp > self.end_timestamp:
                    self.quit()
                    break


        return_code = self.session_process.wait()
        # The cracking ended, set the parameters accordingly
        if return_code in [255,254]:
            self.session_status = "Error"
            if return_code == 254:
                self.reason = "GPU-watchdog alarm"
            else:
                # Strip ANSI escape sequences from hashcat's stderr before
                # storing it as the failure reason.
                ansi_escape = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
                error_msg = self.session_process.stderr.read().decode()
                error_msg = ansi_escape.sub('', error_msg).strip()
                self.reason = error_msg
        elif return_code in [2,3,4]:
            self.session_status = "Aborted"
            self.reason = ""
        else:
            self.session_status = "Done"
            self.reason = ""
        self.time_estimated = "N/A"
        self.speed = "N/A"
        self.save()

    def details(self):
        """Return a dict summary of the session for API/UI display."""
        return {
            "name": self.name,
            "crack_type": self.crack_type,
            "device_type": self.device_type,
            # File extensions are stripped from the displayed names.
            "rule": os.path.basename(self.rule_file)[:-5] if self.rule_file else None,
            "mask": os.path.basename(self.mask_file)[:-7] if self.mask_file else None,
            "wordlist": os.path.basename(self.wordlist_file)[:-1*len(".wordlist")] if self.wordlist_file else None,
            "status": self.session_status,
            "time_started": str(self.time_started),
            "time_estimated": self.time_estimated,
            "speed": self.speed,
            "progress": self.progress,
            "reason": self.reason,
        }

    """
        Returns the first 100000 lines from the potfile starting from a specific line
    """
    def get_potfile(self, from_line):
        line_count = 0
        selected_line_count = 0
        potfile_data = ""
        complete = True
        if os.path.exists(self.pot_file):
            for line in open(self.pot_file, encoding="utf-8"):
                # NOTE(review): a partial last line (still being written) sets
                # complete = True, reporting remaining_data=False — confirm
                # this is intended rather than complete = False.
                if not line.endswith("\n"):
                    complete = True
                    break

                if line_count >= from_line:
                    potfile_data += line
                    selected_line_count += 1

                # Cap the response at 100000 selected lines per call.
                if selected_line_count >= 100000:
                    complete = False
                    break

                line_count += 1

            return {
                "line_count": selected_line_count,
                "remaining_data": not complete,
                "potfile_data": potfile_data,
            }
        else:
            # No potfile yet: empty result, nothing remaining.
            return {
                "line_count": 0,
                "remaining_data": False,
                "potfile_data": "",
            }


    """
        Returns hashcat output file
    """
    def hashcat_output(self):
        return open(self.hashcat_output_file).read()

    """
        Returns hashes file
    """
    def hashes(self):
        return open(self.hash_file).read()


    """
        Cleanup the session before deleting it
    """
    def remove(self):
        self.quit()

        # Best-effort cleanup: each file may legitimately be missing.
        try:
            os.remove(self.result_file)
        except:
            pass
        try:
            os.remove(self.pot_file)
        except:
            pass
        try:
            os.remove(self.hash_file)
        except:
            pass
        try:
            os.remove(self.hashcat_output_file)
        except:
            pass

    """
        Return cracked passwords
    """
    def cracked(self):

        # gather cracked passwords
        cmd_line = [Hashcat.binary, '--show', '-m', str(self.hash_mode_id), self.hash_file, '-o', self.result_file]
        if self.username_included:
            cmd_line += ["--username", "--outfile-format", "2"]
        else:
            cmd_line += ["--outfile-format", "3"]
        cmd_line += ["--potfile-path", self.pot_file]
        # cwd needs to be added for Windows version of hashcat
        p = subprocess.Popen(cmd_line, cwd=os.path.dirname(Hashcat.binary))
        p.wait()

        return open(self.result_file).read()

    """
        Update the session
    """
    def update_session(self):
        self.status()

    """
        Update the session
    """
    def status(self):
        # Only a live (paused/running) process can accept the key.
        if not self.session_status in ["Paused", "Running"]:
            return

        if os.name == 'nt':
            # Windows: inject an 's' (status) key event into the console.
            evt = win32console.PyINPUT_RECORDType(win32console.KEY_EVENT)
            evt.Char = 's'
            evt.RepeatCount = 1
            evt.KeyDown = True
            evt.VirtualKeyCode=0x0
            self.win_stdin.WriteConsoleInput([evt])
        else:
            # Linux: write the key directly to hashcat's stdin.
            try:
                self.session_process.stdin.write(b's')
                self.session_process.stdin.flush()
            except BrokenPipeError:
                pass

    """
        Pause the session
    """
    def pause(self):
        if not self.session_status in ["Paused", "Running"]:
            return

        if os.name == 'nt':
            # Windows: inject a 'p' (pause) key event into the console.
            evt = win32console.PyINPUT_RECORDType(win32console.KEY_EVENT)
            evt.Char = 'p'
            evt.RepeatCount = 1
            evt.KeyDown = True
            evt.VirtualKeyCode=0x0
            self.win_stdin.WriteConsoleInput([evt])
        else:
            try:
                self.session_process.stdin.write(b'p')
                self.session_process.stdin.flush()
            except BrokenPipeError:
                pass

        self.update_session()

    """
        Resume the session
    """
    def resume(self):
        if not self.session_status in ["Paused", "Running"]:
            return

        if os.name == 'nt':
            # Windows: inject an 'r' (resume) key event into the console.
            evt = win32console.PyINPUT_RECORDType(win32console.KEY_EVENT)
            evt.Char = 'r'
            evt.RepeatCount = 1
            evt.KeyDown = True
            evt.VirtualKeyCode=0x0
            self.win_stdin.WriteConsoleInput([evt])
        else:
            try:
                self.session_process.stdin.write(b'r')
                self.session_process.stdin.flush()
            except BrokenPipeError:
                pass

        self.update_session()

    """
        Quit the session
    """
    def quit(self):
        if not self.session_status in ["Paused", "Running"]:
            return

        if os.name == 'nt':
            # Windows: inject a 'q' (quit) key event into the console.
            evt = win32console.PyINPUT_RECORDType(win32console.KEY_EVENT)
            evt.Char = 'q'
            evt.RepeatCount = 1
            evt.KeyDown = True
            evt.VirtualKeyCode=0x0
            self.win_stdin.WriteConsoleInput([evt])
        else:
            try:
                self.session_process.stdin.write(b'q')
                self.session_process.stdin.flush()
            except BrokenPipeError:
                pass

        # Block until the worker thread observes the exit and finalizes state.
        print("Waiting for thread to end....")
        self.thread.join()
        print("Done")

        self.session_status = "Aborted"
        self.save()