Exemplo n.º 1
0
    def wrapper_decorator(*args, **kwargs):
        """Cache wrapper: reuse the cached language list for a GitLab project
        when its ``last_activity_at`` timestamp has not advanced past the
        cached entry; otherwise call ``func`` and cache the fresh result.
        """
        gl_project = args[1]
        project_id = str(gl_project.id)
        last_activity_current = gl_project.attributes["last_activity_at"]
        global language_cache
        if language_cache:
            # BUG FIX: a populated cache may still have no entry for this
            # gitlab_url — default to an empty dict instead of None, which
            # previously crashed on the .get() below.
            projects = language_cache.get(gitlab_url, dict())
        else:
            projects = dict()
            language_cache = dict()
        cached = projects.get(project_id)  # single lookup instead of check-then-index
        if cached:
            last_activity_cached, language_items = cached
            if MayaDT.from_iso8601(last_activity_cached) >= MayaDT.from_iso8601(last_activity_current):
                logger.debug(f"Using cache for project {gl_project.name}")
                return language_items
        # No cache entry, or the cached entry is stale: run the wrapped function.
        languages = func(*args, **kwargs)
        # Cache the result keyed by project id, alongside its activity timestamp.
        projects[project_id] = (
            gl_project.attributes["last_activity_at"],
            list(languages),
        )
        language_cache[gitlab_url] = projects
        return languages
Exemplo n.º 2
0
class TreasureMap(DatastoreRecord):
    # Ideally this is a `policy.collections.TreasureMap`, but it causes a huge
    # circular import due to `Bob` and `Character` in `policy.collections`.
    # TODO #2126
    _treasure_map = RecordField(bytes)
    # Expirations round-trip through their ISO-8601 byte representation.
    _expiration = RecordField(
        MayaDT,
        encode=lambda dt: dt.iso8601().encode(),
        decode=lambda raw: MayaDT.from_iso8601(raw.decode()),
    )
Exemplo n.º 3
0
 def __init__(self, r):
     """Build a trainer record from a raw API row *r*."""
     self._get = r
     # Identity fields, taken verbatim from the payload.
     self._trainer_id = r['id']
     self._trainer_username = r['username']
     self._user_id = r['user_id']
     self._position = r['position']
     self._faction = r['faction']
     # Numeric fields are normalised to int.
     self._xp = int(r['xp'])
     self._level = int(r['level'])
     # `last_updated` is ISO-8601; expose it as a plain datetime.
     self._time = MayaDT.from_iso8601(r['last_updated']).datetime()
Exemplo n.º 4
0
class PolicyArrangement(DatastoreRecord):
    """Datastore record for a policy arrangement."""

    _arrangement_id = RecordField(bytes)
    # Expirations round-trip through their ISO-8601 byte representation.
    _expiration = RecordField(
        MayaDT,
        encode=lambda dt: dt.iso8601().encode(),
        decode=lambda raw: MayaDT.from_iso8601(raw.decode()),
    )
    _kfrag = RecordField(
        KFrag,
        encode=lambda k: k.to_bytes(),
        decode=KFrag.from_bytes,
    )
    _alice_verifying_key = RecordField(
        UmbralPublicKey,
        encode=bytes,
        decode=UmbralPublicKey.from_bytes,
    )
Exemplo n.º 5
0
def parse_time(string: str) -> MayaDT:
    """Parse *string* as an ISO-8601 timestamp.

    Raise ValueError unless the time falls exactly on the hour and carries
    an explicit timezone.
    """
    parsed = MayaDT.from_iso8601(string)
    sub_hour_parts = (parsed.minute, parsed.second, parsed.microsecond)
    if any(sub_hour_parts) or parsed.timezone is None:
        raise ValueError(string)
    return parsed
Exemplo n.º 6
0
    os.getenv('NUCYPHER_CONFIG_ROOT', default=APP_DIR.user_data_dir))
# Directory for user-facing log files; overridable via environment variable.
USER_LOG_DIR = Path(
    os.getenv('NUCYPHER_USER_LOG_DIR', default=APP_DIR.user_log_dir))
DEFAULT_LOG_FILENAME = "nucypher.log"
DEFAULT_JSON_LOG_FILENAME = "nucypher.json"

# Static Seednodes
SeednodeMetadata = namedtuple('seednode',
                              ['checksum_address', 'rest_host', 'rest_port'])
SEEDNODES = tuple()  # no hard-coded seednodes shipped by default

# Sentry (Add your public key and user ID below)
NUCYPHER_SENTRY_PUBLIC_KEY = ""
NUCYPHER_SENTRY_USER_ID = ""
# NOTE(review): with the two values above left empty, this is not a usable
# Sentry DSN — confirm they are populated before enabling error reporting.
NUCYPHER_SENTRY_ENDPOINT = f"https://{NUCYPHER_SENTRY_PUBLIC_KEY}@sentry.io/{NUCYPHER_SENTRY_USER_ID}"

# Web
CLI_ROOT = NUCYPHER_PACKAGE / 'network' / 'templates'
# NOTE(review): this resolves to .../templates/templates — verify intentional.
TEMPLATES_DIR = CLI_ROOT / 'templates'
MAX_UPLOAD_CONTENT_LENGTH = 1024 * 50  # 50 KiB request-body cap

# Dev Mode
TEMPORARY_DOMAIN = ":temporary-domain:"  # for use with `--dev` node runtimes

# Event Blocks Throttling
# Name of the environment variable (not its value).
NUCYPHER_EVENTS_THROTTLE_MAX_BLOCKS = 'NUCYPHER_EVENTS_THROTTLE_MAX_BLOCKS'

# Probationary period
# NOTE(review): the month is unpadded ('2022-3-17') — confirm the ISO-8601
# parser accepts this form.
END_OF_POLICIES_PROBATIONARY_PERIOD = MayaDT.from_iso8601(
    '2022-3-17T23:59:59.0Z')
Exemplo n.º 7
0
 def __init__(self, r):
     """Wrap a raw leaderboard payload *r*."""
     self._get = r
     self._title = r['title']
     self._leaderboard = r['leaderboard']
     # `generated` is an ISO-8601 timestamp; expose it as a plain datetime.
     self._time = MayaDT.from_iso8601(r['generated']).datetime()
Exemplo n.º 8
0
 def time_remaining(n, latest_crawler_stats):
     """Render an HTML block showing slang time until the next period."""
     # NOTE(review): `self` is not a parameter here — this function must be
     # defined inside a method so that `self` is captured from the enclosing
     # closure; confirm against the surrounding definition.
     data = self.verify_cached_stats(latest_crawler_stats)
     slang = MayaDT.from_iso8601(data['next_period']).slang_time()
     return html.Div([html.H4("Next Period"), html.H5(slang)])
Exemplo n.º 9
0
def _handle_fill_parser(args):
    """CLI handler: fill the database from the parsed XML dump files."""
    # An explicit creation date is optional; parse it only when provided.
    if args.creation_date:
        creation_date = MayaDT.from_iso8601(args.creation_date)
    else:
        creation_date = None
    migrate_data.fill_database(args.questions_xml, args.answers_xml,
                               args.comments_xml, creation_date)
Exemplo n.º 10
0
class BaseEconomics:
    """
    A representation of a contract deployment set's constructor parameters, and the calculations
    used to generate those values from high-level human-understandable parameters.

    Formula for staking in one period for the second phase:
    (totalSupply - currentSupply) * (lockedValue / totalLockedValue) * (k1 + allLockedPeriods) / d / k2

    d - Coefficient which modifies the rate at which the maximum issuance decays
    k1 - Numerator of the locking duration coefficient
    k2 - Denominator of the locking duration coefficient

    if allLockedPeriods > maximum_rewarded_periods then allLockedPeriods = maximum_rewarded_periods
    kappa * log(2) / halving_delay === (k1 + allLockedPeriods) / d

    """

    # Token Denomination
    __token_decimals = 18
    nunits_per_token = 10**__token_decimals  # Smallest unit designation

    # Period Definition
    _default_hours_per_period = 24 * 7
    _default_genesis_hours_per_period = 24

    # Time Constraints
    _default_minimum_worker_periods = 2
    _default_minimum_locked_periods = 4  # 28 days

    # Value Constraints
    _default_minimum_allowed_locked = NU(15_000, 'NU').to_nunits()
    _default_maximum_allowed_locked = NU(30_000_000, 'NU').to_nunits()

    # Slashing parameters
    HASH_ALGORITHM_KECCAK256 = 0
    HASH_ALGORITHM_SHA256 = 1
    HASH_ALGORITHM_RIPEMD160 = 2

    # Adjudicator
    _default_hash_algorithm = HASH_ALGORITHM_SHA256
    _default_base_penalty = 2
    _default_penalty_history_coefficient = 0
    _default_percentage_penalty_coefficient = 100000  # 0.001%
    _default_reward_coefficient = 2

    # Worklock
    # NOTE: these imports live in the class body so the defaults below can be
    # computed at class-definition time.
    from maya import MayaDT
    from web3 import Web3
    _default_worklock_supply: int = NU(225_000_000, 'NU').to_nunits()
    _default_bidding_start_date: int = MayaDT.from_iso8601(
        '2020-09-01T00:00:00.0Z').epoch
    _default_bidding_end_date: int = MayaDT.from_iso8601(
        '2020-09-28T23:59:59.0Z').epoch
    _default_cancellation_end_date: int = MayaDT.from_iso8601(
        '2020-09-30T23:59:59.0Z').epoch
    _default_worklock_boosting_refund_rate: int = 800
    _default_worklock_commitment_duration: int = 180
    _default_worklock_min_allowed_bid: int = Web3.toWei(5, "ether")

    def __init__(
            self,

            # StakingEscrow
            initial_supply: int,
            total_supply: int,
            issuance_decay_coefficient: int,
            lock_duration_coefficient_1: int,
            lock_duration_coefficient_2: int,
            maximum_rewarded_periods: int,
            first_phase_supply: int,
            first_phase_max_issuance: int,
            genesis_hours_per_period: int = _default_genesis_hours_per_period,
            hours_per_period: int = _default_hours_per_period,
            minimum_locked_periods: int = _default_minimum_locked_periods,
            minimum_allowed_locked: int = _default_minimum_allowed_locked,
            maximum_allowed_locked: int = _default_maximum_allowed_locked,
            minimum_worker_periods: int = _default_minimum_worker_periods,

            # Adjudicator
            hash_algorithm: int = _default_hash_algorithm,
            base_penalty: int = _default_base_penalty,
            penalty_history_coefficient:
        int = _default_penalty_history_coefficient,
            percentage_penalty_coefficient:
        int = _default_percentage_penalty_coefficient,
            reward_coefficient: int = _default_reward_coefficient,

            # WorkLock
            worklock_supply: int = _default_worklock_supply,
            bidding_start_date: int = _default_bidding_start_date,
            bidding_end_date: int = _default_bidding_end_date,
            cancellation_end_date: int = _default_cancellation_end_date,
            worklock_boosting_refund_rate:
        int = _default_worklock_boosting_refund_rate,
            worklock_commitment_duration:
        int = _default_worklock_commitment_duration,
            worklock_min_allowed_bid: int = _default_worklock_min_allowed_bid):
        """
        :param initial_supply: Number of tokens in circulating supply at t=0
        :param first_phase_supply: Number of tokens in circulating supply at phase switch (variable t)
        :param total_supply: Tokens at t=8
        :param first_phase_max_issuance: (Imax) Maximum number of new tokens minted per period during Phase 1.
        See Equation 7 in Staking Protocol & Economics paper.
        :param issuance_decay_coefficient: (d) Coefficient which modifies the rate at which the maximum issuance decays,
        only applicable to Phase 2. d = 365 * half-life / LOG2 where default half-life = 2.
        See Equation 10 in Staking Protocol & Economics paper
        :param lock_duration_coefficient_1: (k1) Numerator of the coefficient which modifies the extent
        to which a stake's lock duration affects the subsidy it receives. Affects stakers differently.
        Applicable to Phase 1 and Phase 2. k1 = k2 * small_stake_multiplier where default small_stake_multiplier = 0.5.
        See Equation 8 in Staking Protocol & Economics paper.
        :param lock_duration_coefficient_2: (k2) Denominator of the coefficient which modifies the extent
        to which a stake's lock duration affects the subsidy it receives. Affects stakers differently.
        Applicable to Phase 1 and Phase 2. k2 = maximum_rewarded_periods / (1 - small_stake_multiplier)
        where default maximum_rewarded_periods = 365 and default small_stake_multiplier = 0.5.
        See Equation 8 in Staking Protocol & Economics paper.
        :param maximum_rewarded_periods: (kmax) Number of periods beyond which a stake's lock duration
        no longer increases the subsidy it receives. kmax = reward_saturation * 365 where default reward_saturation = 1.
        See Equation 8 in Staking Protocol & Economics paper.
        :param genesis_hours_per_period: Hours in single period at genesis
        :param hours_per_period: Hours in single period
        :param minimum_locked_periods: Min amount of periods during which tokens can be locked
        :param minimum_allowed_locked: Min amount of tokens that can be locked
        :param maximum_allowed_locked: Max amount of tokens that can be locked
        :param minimum_worker_periods: Min amount of periods while a worker can't be changed

        :param hash_algorithm: Hashing algorithm
        :param base_penalty: Base for the penalty calculation
        :param penalty_history_coefficient: Coefficient for calculating the penalty depending on the history
        :param percentage_penalty_coefficient: Coefficient for calculating the percentage penalty
        :param reward_coefficient: Coefficient for calculating the reward

        :param worklock_supply: Tokens distributed via WorkLock (in nunits)
        :param bidding_start_date: Epoch timestamp when bidding starts
        :param bidding_end_date: Epoch timestamp when bidding ends
        :param cancellation_end_date: Epoch timestamp when the cancellation window ends
        :param worklock_boosting_refund_rate: Coefficient to boost refund ETH
        :param worklock_commitment_duration: Duration of token locking (in periods)
        :param worklock_min_allowed_bid: Minimum allowed ETH amount for bidding (in wei)
        """

        #
        # WorkLock
        #

        self.bidding_start_date = bidding_start_date
        self.bidding_end_date = bidding_end_date
        self.cancellation_end_date = cancellation_end_date
        self.worklock_supply = worklock_supply
        self.worklock_boosting_refund_rate = worklock_boosting_refund_rate
        self.worklock_commitment_duration = worklock_commitment_duration
        self.worklock_min_allowed_bid = worklock_min_allowed_bid

        #
        # NucypherToken & Staking Escrow
        #

        self.initial_supply = initial_supply
        # Remaining / Reward Supply - Escrow Parameter
        self.reward_supply = total_supply - initial_supply
        self.total_supply = total_supply
        self.first_phase_supply = first_phase_supply
        self.first_phase_total_supply = initial_supply + first_phase_supply
        self.first_phase_max_issuance = first_phase_max_issuance
        self.issuance_decay_coefficient = issuance_decay_coefficient
        self.lock_duration_coefficient_1 = lock_duration_coefficient_1
        self.lock_duration_coefficient_2 = lock_duration_coefficient_2
        self.maximum_rewarded_periods = maximum_rewarded_periods
        self.genesis_hours_per_period = genesis_hours_per_period
        self.hours_per_period = hours_per_period
        self.minimum_locked_periods = minimum_locked_periods
        self.minimum_allowed_locked = minimum_allowed_locked
        self.maximum_allowed_locked = maximum_allowed_locked
        self.minimum_worker_periods = minimum_worker_periods
        self.genesis_seconds_per_period = genesis_hours_per_period * 60 * 60  # Genesis seconds in a single period
        self.seconds_per_period = hours_per_period * 60 * 60  # Seconds in a single period
        self.days_per_period = hours_per_period // 24  # Days in a single period

        #
        # Adjudicator
        #

        self.hash_algorithm = hash_algorithm
        self.base_penalty = base_penalty
        self.penalty_history_coefficient = penalty_history_coefficient
        self.percentage_penalty_coefficient = percentage_penalty_coefficient
        self.reward_coefficient = reward_coefficient

    @property
    def erc20_initial_supply(self) -> int:
        return int(self.initial_supply)

    @property
    def erc20_reward_supply(self) -> int:
        return int(self.reward_supply)

    @property
    def erc20_total_supply(self) -> int:
        return int(self.total_supply)

    @property
    def staking_deployment_parameters(self) -> Tuple[int, ...]:
        """Cast coefficient attributes to uint256 compatible type for solidity+EVM"""
        # NOTE: the odd `self.\n attribute` wrapping below is an auto-formatter
        # artifact; each pair of lines is a single attribute access.
        deploy_parameters = (

            # Period
            self.genesis_hours_per_period,  # Hours in single period at genesis
            self.hours_per_period,  # Hours in single period

            # Coefficients
            self.
            issuance_decay_coefficient,  # Coefficient which modifies the rate at which the maximum issuance decays (d)
            self.
            lock_duration_coefficient_1,  # Numerator of the locking duration coefficient (k1)
            self.
            lock_duration_coefficient_2,  # Denominator of the locking duration coefficient (k2)
            self.
            maximum_rewarded_periods,  # Max periods that will be additionally rewarded (awarded_periods)
            self.first_phase_total_supply,  # Total supply for the first phase
            self.
            first_phase_max_issuance,  # Max possible reward for one period for all stakers in the first phase

            # Constraints
            self.
            minimum_locked_periods,  # Min amount of periods during which tokens can be locked
            self.
            minimum_allowed_locked,  # Min amount of tokens that can be locked
            self.
            maximum_allowed_locked,  # Max amount of tokens that can be locked
            self.
            minimum_worker_periods  # Min amount of periods while a worker can't be changed
        )
        return tuple(map(int, deploy_parameters))

    @property
    def slashing_deployment_parameters(self) -> Tuple[int, ...]:
        """Cast coefficient attributes to uint256 compatible type for solidity+EVM"""
        deployment_parameters = [
            self.hash_algorithm, self.base_penalty,
            self.penalty_history_coefficient,
            self.percentage_penalty_coefficient, self.reward_coefficient
        ]
        return tuple(map(int, deployment_parameters))

    @property
    def worklock_deployment_parameters(self):
        """
        0 token - Token contract
        1 escrow -  Staking Escrow contract
        ...
        2 startBidDate - Timestamp when bidding starts
        3 endBidDate - Timestamp when bidding will end
        4 endCancellationDate - Timestamp when cancellation window will end
        5 boostingRefund - Coefficient to boost refund ETH
        6 stakingPeriods - Duration of tokens locking
        7 minAllowedBid - Minimum allowed ETH amount for bidding
        """
        deployment_parameters = [
            self.bidding_start_date, self.bidding_end_date,
            self.cancellation_end_date, self.worklock_boosting_refund_rate,
            self.worklock_commitment_duration, self.worklock_min_allowed_bid
        ]
        return tuple(map(int, deployment_parameters))

    @property
    def bidding_duration(self) -> int:
        """Returns the total bidding window duration in seconds."""
        return self.bidding_end_date - self.bidding_start_date

    @property
    def cancellation_window_duration(self) -> int:
        """Returns the total cancellation window duration in seconds."""
        return self.cancellation_end_date - self.bidding_end_date
Exemplo n.º 11
0
    def _sync_with_aws(self, pref):
        """Pull new file-system events from the cloud DB and apply them locally.

        Handles created/modified (download and decrypt, or plain copy),
        deleted (send to trash), and moved events.  Local changes caused by
        this sync are registered with ``self.event_filter`` so they are not
        re-propagated as fresh events.
        """
        latus.logger.log.info('entering _sync')
        updated_events = self._pull_down_new_db_entries(pref)
        for fs_event in updated_events:
            event_type = fs_event['event_type']
            hash_value = fs_event['file_hash']
            local_file_path = os.path.join(pref.get_latus_folder(), fs_event['file_path'])
            # Hash the existing local file (if any) to detect whether the
            # remote content actually differs.
            if os.path.exists(local_file_path):
                local_file_hash, _ = latus.hash.calc_sha512(local_file_path, pref.get_crypto_key())
            else:
                local_file_hash = None
            if event_type == LatusFileSystemEvent.created or event_type == LatusFileSystemEvent.modified:
                if hash_value != local_file_hash:
                    self.event_filter.add_event(local_file_path, event_type)
                    crypto_key = pref.get_crypto_key()
                    if crypto_key is None:
                        # NOTE(review): this `return` abandons all remaining
                        # updated_events until the next sync pass — confirm
                        # that is intended rather than `continue`.
                        latus.logger.log.warning('no crypto_key yet')
                        return
                    crypto = latus.crypto.Crypto(crypto_key, pref.get_node_id())

                    if hash_value:
                        # Fetch the content-addressed (by hash) encrypted blob
                        # into the local cache, then expand it into place.
                        cache_fernet_file = os.path.join(pref.get_cache_folder(), hash_value + ENCRYPTION_EXTENSION)
                        self.s3.download_file(cache_fernet_file, hash_value)
                        latus.logger.log.info(
                            'originator=%s, event_type=%s, detection=%s, file_path="%s" - propagating to "%s" (file_hash=%s)' %
                            (fs_event['originator'], fs_event['event_type'], fs_event['detection'],
                             fs_event['file_path'], local_file_path, fs_event['file_hash']))
                        encrypt, shared, cloud = True, False, True  # todo: get this from pref
                        if encrypt:
                            expand_ok = crypto.decrypt_file(cache_fernet_file, local_file_path)
                            if expand_ok:
                                # Preserve the originator's modification time.
                                mtime = MayaDT.from_iso8601(fs_event['mtime']).epoch
                                os.utime(local_file_path, (mtime, mtime))
                            else:
                                # todo: something more elegant than just calling fatal here
                                latus.logger.log.fatal('Unable to decrypt (possible latus key error) : %s : %s' % (
                                cache_fernet_file, local_file_path))
                        else:
                            # Unencrypted mode: copy straight from the cache.
                            cloud_file = os.path.join(pref.get_cache_folder(),
                                                      fs_event['file_hash'] + UNENCRYPTED_EXTENSION)
                            shutil.copy2(cloud_file, local_file_path)
                    else:
                        latus.logger.log.warning('%s : hash is None for %s' % (pref.get_node_id(), local_file_path))
            elif event_type == LatusFileSystemEvent.deleted:
                try:
                    if os.path.exists(local_file_path):
                        latus.logger.log.info('%s : %s : %s deleted %s' % (
                        pref.get_node_id(), fs_event['detection'], fs_event['originator'], fs_event['file_path']))
                        self.event_filter.add_event(local_file_path, event_type)
                        send2trash.send2trash(local_file_path)
                except OSError:
                    # fallback
                    latus.logger.log.warn('%s : send2trash failed on %s' % (pref.get_node_id(), local_file_path))
            elif event_type == LatusFileSystemEvent.moved:
                # todo: make a specific 'moved' filter event - this one just uses the dest
                latus_path = pref.get_latus_folder()
                latus.logger.log.info('%s : %s : %s moved %s to %s' % (
                pref.get_node_id(), fs_event['detection'], fs_event['originator'], fs_event['src_path'],
                fs_event['file_path']))
                dest_abs_path = os.path.join(latus_path, fs_event['file_path'])
                src_abs_path = os.path.join(latus_path, fs_event['src_path'])
                if not os.path.exists(src_abs_path):
                    # NOTE(review): this `return` also skips any remaining
                    # events, not just this one — confirm intended.
                    logger.log.info('%s : most recent is move of %s to %s but source does not exist - nothing to do' % (
                    pref.get_node_id(), src_abs_path, dest_abs_path))
                    return
                os.makedirs(os.path.dirname(dest_abs_path), exist_ok=True)
                # we'll get events for both src and dest
                self.event_filter.add_event(src_abs_path, event_type)
                self.event_filter.add_event(dest_abs_path, event_type)
                try:
                    shutil.move(src_abs_path, dest_abs_path)
                except IOError as e:
                    latus.logger.log.error('%s : %s' % (pref.get_node_id(), str(e)))
                    if os.path.exists(dest_abs_path):
                        latus.logger.log.error(
                            '%s : attempting move but %s already exists' % (pref.get_node_id(), dest_abs_path))
                    if not os.path.exists(src_abs_path):
                        latus.logger.log.error(
                            '%s : attempting move but %s not found' % (pref.get_node_id(), src_abs_path))
            else:
                latus.logger.log.error('not yet implemented : %s' % str(event_type))

        latus.logger.log.info('exiting _sync')
Exemplo n.º 12
0
def to_datetime(dt) -> datetime:
    """Coerce *dt* to a datetime: ISO-8601 strings are parsed; anything else
    is passed through unchanged."""
    if not isinstance(dt, str):
        return dt
    return MayaDT.from_iso8601(dt).datetime()