def run_collector():
    lock = Lock()
    lock.acquire()
    Collector.collect()
    Actions.check_db_mention()
    Update.updater()
    time.sleep(5)
    UserData.user_database()
    CopyDb.copy_database()
    lock.release()
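# Note: the lock above is created inside the function, so two concurrent
# calls each get their own Lock and never contend. A minimal sketch of the
# usual alternative with a module-level lock; `run_collector_serialized`
# and `collect_once` are hypothetical names, not part of the original code.
from threading import Lock
import time

_collector_lock = Lock()  # shared by every call, so overlapping runs serialize


def run_collector_serialized(collect_once):
    # `collect_once` stands in for the pipeline body shown above
    # (Collector.collect(), Actions.check_db_mention(), ...).
    with _collector_lock:
        collect_once()
        time.sleep(5)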
def test_edit_user_data(ident_service):
    # Ensure the head of the chain and the latest tx for the user are
    # in their initial states
    assert ident_service.latest_tx_list.get(user_1_cert_string) is None
    assert ident_service.last_transaction_hash == b'Base'

    # Create the initial data for the user
    data_1 = json.dumps({'first_name': 'Bob', 'last_name': 'Smith'}).encode()
    signature_1 = sign(user_1_private_key, data_1)
    initial_user_data = UserData(data_1, signature_1)

    # Add the user data to the service
    initial_tx_hash = ident_service.add_user_data(initial_user_data,
                                                  user_1_cert_string)

    # Ensure the head of the chain and the latest tx hash for the user
    # have been updated
    assert initial_tx_hash == ident_service.latest_tx_list[user_1_cert_string]
    assert initial_tx_hash == ident_service.last_transaction_hash

    # Get the transaction from the service
    initial_tx = ident_service.get_transaction(initial_tx_hash)

    # Ensure the transaction points to the old head of the chain
    assert initial_tx.hash_pointer == b'Base'

    # Create the updated data for the user
    data_2 = json.dumps({
        'first_name': 'Bob',
        'last_name': 'Smith',
        'age': 35
    }).encode()
    signature_2 = sign(user_1_private_key, data_2)
    updated_user_data = UserData(data_2, signature_2)

    # Add the updated data to the service
    updated_tx_hash = ident_service.add_user_data(updated_user_data,
                                                  user_1_cert_string)

    # Ensure the head of the chain and the latest tx hash for the user
    # have been updated
    assert updated_tx_hash == ident_service.latest_tx_list[user_1_cert_string]
    assert updated_tx_hash == ident_service.last_transaction_hash

    # Get the new transaction
    updated_tx = ident_service.get_transaction(updated_tx_hash)

    # Ensure the new transaction points at the previous head of the chain
    assert updated_tx.hash_pointer == initial_tx_hash
def get_user_data(user_id):
    if user_id not in data:
        user = UserData(user_id)
        data[user_id] = user
        return user
    else:
        return data[user_id]
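# The branch above can be collapsed: create and cache the record on first
# access, then return the cached value on every path. A self-contained
# sketch; the module-level `data` dict and the `UserData(user_id)`
# constructor are assumed to match the snippet above.
data = {}


class UserData:
    def __init__(self, user_id):
        self.user_id = user_id


def get_user_data(user_id):
    # Get-or-create: build the record once, reuse it afterwards.
    if user_id not in data:
        data[user_id] = UserData(user_id)
    return data[user_id]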
def data(self, index, role=Qt.DisplayRole):
    row = index.row()
    column = index.column()
    if role == Qt.TextAlignmentRole:
        return Qt.AlignCenter
    if role == Qt.DisplayRole:
        shipfu_id = str(self.shipfus[row].Shipfu.shipfu_id)
        try:
            shipfu_user_data = self.user_shipfus_data[shipfu_id]
        except KeyError:
            shipfu_user_data = UserData.initShipfuData()
            self.user_shipfus_data[shipfu_id] = shipfu_user_data
        values = (self.shipfus[row].Shipfu.shipfu_id,
                  self.shipfus[row].Shipfu.image,
                  self.shipfus[row].Shipfu.name,
                  self.shipfus[row].Rarity.name,
                  self.shipfus[row].ShipType.name,
                  self.shipfus[row].Nation.name,
                  shipfu_user_data["owned"],
                  shipfu_user_data["mlb"],
                  shipfu_user_data["max_level"],
                  shipfu_user_data["max_affection"],
                  self.shipfus[row].Shipfu.obtention_methods)
        return values[column]
def test_add_user_data(ident_service):
    # No transactions in the chain yet
    assert ident_service.last_transaction_hash == b'Base'

    # Create the data and signature
    data = json.dumps({'first_name': 'Bob', 'last_name': 'Smith'}).encode()
    signature = sign(user_1_private_key, data)
    user_data = UserData(data, signature)

    # Add the data to the service
    tx_hash = ident_service.add_user_data(user_data, user_1_cert_string)

    # Ensure the head of the chain is the new transaction
    assert tx_hash == ident_service.last_transaction_hash

    # Get the transaction and encryption key
    tx = ident_service.get_transaction(tx_hash)
    key = ident_service.get_key(tx_hash)

    # Ensure the transaction points to the old head of the chain
    assert tx.hash_pointer == b'Base'

    # Get the message and signature from the transaction
    decrypted = decrypt(key, tx.action.get_data())
    message, signature = split_data(decrypted)

    # Ensure the data has not been tampered with
    verify(user_1_cert, message, signature)

    # Ensure the data matches what the user uploaded
    assert message == data
def __init__(self):
    super().__init__()
    self.user_data = UserData()
    self.setupUi()
    self.th = threading.Thread(target=self.checkAppVersion)
    self.th.start()
    self.timer = QtCore.QTimer(self)
    self.connect(self.timer, QtCore.SIGNAL("timeout()"),
                 self.setUpdateInfoMessage)
    self.timer.start(500)
def __init__(self, *args, **kwds):
    kwds["style"] = kwds.get("style", 0)
    wx.Frame.__init__(self, *args, **kwds)
    self.SetBackgroundColour(wx.Colour("WHITE"))
    self.user_data = [UserData('User 1'), UserData('User 2')]
    self.plot_panel = [None, None]
    self.signal_acqui = [None, None]
    self.analysis_panel = [None, None]
    self.animation_panel = [None, None]
    self.analysis_lower_left = [None, None]

    # Creating the custom title bar
    self.panelTitleBar = wx.Panel(self, wx.ID_ANY)
    self.btnManual = wx.Button(self.panelTitleBar, wx.ID_ANY, "",
                               style=wx.BORDER_NONE | wx.BU_NOTEXT)
    self.btnColor = wx.Button(self.panelTitleBar, wx.ID_ANY, "",
                              style=wx.BORDER_NONE | wx.BU_NOTEXT)
    self.btnMinimize = wx.Button(self.panelTitleBar, wx.ID_ANY, "-",
                                 style=wx.BORDER_NONE | wx.BU_NOTEXT)
    self.btnMaximize = wx.Button(self.panelTitleBar, wx.ID_ANY, "[]",
                                 style=wx.BORDER_NONE | wx.BU_NOTEXT)
    self.btnExit = wx.Button(self.panelTitleBar, wx.ID_ANY, "",
                             style=wx.BORDER_NONE | wx.BU_NOTEXT)
    self.panelBody = wx.Panel(self, wx.ID_ANY)

    self.Bind(wx.EVT_BUTTON, self.OnBtnExitClick, self.btnExit)
    self.Bind(wx.EVT_BUTTON, self.OnBtnMinimizeClick, self.btnMinimize)
    self.Bind(wx.EVT_BUTTON, self.OnBtnMaximizeClick, self.btnMaximize)
    self.Bind(wx.EVT_BUTTON, self.OnBtnColorClick, self.btnColor)
    self.Bind(wx.EVT_BUTTON, self.OnBtnManualClick, self.btnManual)
    self.panelTitleBar.Bind(wx.EVT_LEFT_DOWN, self.OnTitleBarLeftDown)
    self.panelTitleBar.Bind(wx.EVT_MOTION, self.OnMouseMove)

    self._isClickedDown = False
    self._LastPosition = self.GetPosition()

    self.Maximize(True)
    self.__set_properties()
    self.__do_layout()

    pub.subscribe(self.changeColor, "COLOR_CHANGE")
    pub.subscribe(self.receiveMessage, "SEND_MESSAGE")
def test_share_data(ident_service):
    # Create the user data
    data = json.dumps({'first_name': 'Bob', 'last_name': 'Smith'}).encode()
    signature = sign(user_1_private_key, data)
    user_data = UserData(data, signature)

    # Add the user's data to the service
    tx_hash = ident_service.add_user_data(user_data, user_1_cert_string)

    # Create data to share with the service provider
    shared_data = json.dumps({'first_name': 'Bob'}).encode()
    signature = sign(user_1_private_key, shared_data)
    shared_user_data = UserData(shared_data, signature)

    # Add the shared data to the identity service
    shared_tx_hash = ident_service.share_data(shared_user_data,
                                              user_1_cert_string, sp_1_cert)

    # Ensure the head of the chain is the new transaction
    assert ident_service.last_transaction_hash == shared_tx_hash

    # Get the share transaction
    share_tx = ident_service.get_transaction(shared_tx_hash)

    # Ensure the share transaction points to the previous head of the chain
    assert tx_hash == share_tx.hash_pointer

    # As the service provider, get the encryption key and decrypt the data
    encrypted_encryption_key, encrypted_data = split_data(
        share_tx.action.get_data())
    decrypted_encryption_key = decrypt_private(sp_1_private_key,
                                               encrypted_encryption_key)
    share_decrypted = decrypt(decrypted_encryption_key, encrypted_data)
    share_message, share_signature = split_data(share_decrypted)

    # Verify the data is signed by the user and hasn't been tampered with
    verify(user_1_cert, share_message, share_signature)

    # Ensure the data matches what the user uploaded to the service
    assert share_message == shared_data
def __init__(self, configs_directory='configs'):
    configs = SITHelper(configs_directory).get_configs('troposphere')
    self.TEMPLATE_DESCRIPTION = configs["template_description"]
    self.INSTANCE_TYPE = configs["instance_type"]
    self.SECURITY_GROUPS = configs["security_groups"]
    self.KEY_NAME = configs["key_name"]
    self.TAG_KEY = configs["tag_key"]
    self.TAG_VALUE = configs["tag_value"]
    self.AMI_ID = configs['ami_id']
    self.EBS_VOLUME_SIZE = configs['ebs_volume_size']
    self.EBS_DEVICE_NAME = configs['ebs_device_name']
    self.MAX_SIZE = configs['max_size']
    self.MIN_SIZE = configs['min_size']
    self.SUBNET = configs['subnet']
    self.CLUSTER_NAME = configs['cluster_name']
    self.AUTOSCALING_GROUP_NAME = configs['autoscaling_group_name']
    self.LAUNCH_CONFIGURATION_NAME = configs['launch_configuration_name']
    self.AUTOSCALE_UP_ALARMS = configs['autoscale_up_alarms']
    self.AUTOSCALE_DOWN_ALARMS = configs['autoscale_down_alarms']
    self.ECS_TASK_CLEANUP_WAIT = configs['ecs_task_cleanup_wait_duration']
    self.template = Template()
    self.user_data = UserData(configs_directory)
    self.init_template()
def __init__(self, researchUserData):
    super().__init__()
    self.setupUi(self)
    self.researchUserData = researchUserData
    researchShips = Data.getResearchShips()
    for researchShip in researchShips:
        shipfu_id = researchShip.Shipfu.shipfu_id
        try:
            researchShipUserData = self.researchUserData[shipfu_id]
        except KeyError:
            researchShipUserData = UserData.initResearchData()
            self.researchUserData[shipfu_id] = researchShipUserData
        self.gridLayout.addWidget(
            ResearchWidget(researchShip, researchShipUserData))
def __init__(self):
    self.window = Window()
    self.user_data = UserData()
    self.active_user = None
    self.main_menu = MainMenu(self.window, self)
    self.sign_in = SignIn(self.window, self)
    self.deposit_menu = DepositMenu(self.window, self)
    self.new_account = NewAccount(self.window, self)
    self.user_menu = UserMenu(self.window, self)
    self.transfer_menu = TransferMenu(self.window, self)
    self.account_created = AccountCreated(self.window, self)
    self.stack = []
    self.switch_to("main_menu")
    self.window.mainloop()
def run_listener():
    api = Actions.get_api()
    friend_ids = UserData.friend_ids()
    while True:
        try:
            myStreamListener = MyStreamListener()
            myStream = tweepy.Stream(auth=api.auth, listener=myStreamListener)
            myStream.filter(follow=friend_ids)
        except Exception as e:
            print(str(e) + '\n')
            continue
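# The loop above reconnects immediately after every failure, which can spin
# and hammer the API when the stream is down. A minimal, self-contained
# sketch of adding exponential backoff between attempts; `retry_forever` and
# `connect` are hypothetical names, not part of the original listener.
import time


def retry_forever(connect, base_delay=5, max_delay=320):
    # Call `connect()` (e.g. the myStream.filter(...) body above) and
    # wait progressively longer between consecutive failures.
    delay = base_delay
    while True:
        try:
            connect()
            delay = base_delay  # reset the backoff after a clean return
        except Exception as exc:
            print(exc)
            time.sleep(delay)
            delay = min(delay * 2, max_delay)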
def on_data(self, data):
    friend_ids = UserData.friend_ids()
    all_data = json.loads(data)
    tweet = all_data["text"]
    created_at = all_data["created_at"]
    username = all_data["user"]["screen_name"]
    user_id = all_data["user"]["id_str"]
    tweet_id = all_data["id"]
    reply_id = all_data["in_reply_to_status_id"]
    tweet_source = all_data["source"]
    tweet_source = str(re.search(r'\>(.*?)\<', tweet_source).group(0))[1:-1]
    tweet_data = [
        tweet, username, created_at, tweet_id, tweet_source, user_id,
        str(reply_id)
    ]
    if user_id in friend_ids:
        if '@BonneNick' in tweet:
            print('{}({}) - {}'.format(username, created_at, tweet))
            Actions.add_mention_db(tweet_data)
            if reply_id is not None:
                AutoReply.check_4_id(Actions.get_api(), reply_id, tweet_id,
                                     username)
        else:
            print('{}({}) - {}'.format(username, created_at, tweet))
            print()
    return True
def __init__(self, configs_directory='configs'):
    configs = SITHelper(configs_directory).get_configs('troposphere')
    self.TEMPLATE_DESCRIPTION = configs["template_description"]
    self.INSTANCE_TYPE = configs["instance_type"]
    self.SECURITY_GROUPS = configs["security_groups"]
    self.KEY_NAME = configs["key_name"]
    self.TAG_KEY = configs["tag_key"]
    self.TAG_VALUE = configs["tag_value"]
    self.AMI_ID = configs['ami_id']
    self.MAX_SIZE = configs['max_size']
    self.MIN_SIZE = configs['min_size']
    self.SUBNET = configs['subnet']
    self.CLUSTER_NAME = configs['cluster_name']
    self.AUTOSCALING_GROUP_NAME = configs['autoscaling_group_name']
    self.LAUNCH_CONFIGURATION_NAME = configs['launch_configuration_name']
    self.SCALING_METRIC = configs['scaling_metric']
    self.SCALE_UP_THRESHOLD = configs['scale_up_threshold']
    self.SCALE_DOWN_THRESHOLD = configs['scale_down_threshold']
    self.template = Template()
    self.user_data = UserData(configs_directory)
    self.init_template()
def __init__(self, shopEventUserData):
    super().__init__()
    self.setupUi(self)
    self.shopEventUserData = shopEventUserData
    self.itemWidgets = list()
    shopEventItems = Data.getShopEventItems()
    for i, shopEventItem in enumerate(shopEventItems):
        itemId = str(shopEventItem.event_buyable_id)
        try:
            shopEventItemUserData = self.shopEventUserData[itemId]
        except KeyError:
            shopEventItemUserData = UserData.initShopEventData()
            self.shopEventUserData[itemId] = shopEventItemUserData
        itemWidget = ShopItemWidget(shopEventItem, shopEventItemUserData)
        itemWidget.quantitySpinBox.valueChanged.connect(self.displayTotalPrice)
        self.itemWidgets.append(itemWidget)
        self.gridLayout.addWidget(itemWidget, i // NB_GRID_COLUMNS,
                                  i % NB_GRID_COLUMNS)
    self.displayTotalPrice()
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""View for the front-end."""

from django import shortcuts

from user_data import UserData

user_data = UserData()


def display_performance_database(request):
    """View for performance database table page."""
    data = user_data.get_all_users_data()
    return shortcuts.render(request, 'data_table.html',
                            {'all_users_data': data})


def display_configs(request):
    """View for config page."""
    return shortcuts.render(request, 'configs.html', {})


def display_general_statistic(request, metric):
def allUsersData():
    userData = UserData('/tmp/clientmetricsdb')
    return userData.getAllUsersData()
def test_multiple_users(ident_service):
    assert ident_service.last_transaction_hash == b'Base'

    # Create the first user's data
    data_1 = json.dumps({'first_name': 'Bob', 'last_name': 'Smith'}).encode()
    signature_1 = sign(user_1_private_key, data_1)
    user_data = UserData(data_1, signature_1)

    # Add the first user's data to the identity service
    user_1_tx_hash = ident_service.add_user_data(user_data,
                                                 user_1_cert_string)

    # Ensure the head of the chain has been updated
    assert user_1_tx_hash == ident_service.last_transaction_hash

    # Create the second user's data
    data_2 = json.dumps({'first_name': 'Sally', 'last_name': 'Jones'}).encode()
    signature_2 = sign(user_2_private_key, data_2)
    user_data_2 = UserData(data_2, signature_2)

    # Add the second user's data to the identity service
    user_2_tx_hash = ident_service.add_user_data(user_data_2,
                                                 user_2_cert_string)

    # Ensure the head of the chain has been updated
    assert user_2_tx_hash == ident_service.last_transaction_hash

    # Get the first user's transaction
    user_1_tx = ident_service.get_transaction(user_1_tx_hash)
    key_1 = ident_service.get_key(user_1_tx_hash)

    # Ensure the first user's transaction points to the base hash pointer
    assert user_1_tx.hash_pointer == b'Base'

    # Decrypt the first user's data
    decrypted_1 = decrypt(key_1, user_1_tx.action.get_data())
    message_1, signature_1 = split_data(decrypted_1)

    # Validate the first user's data
    verify(user_1_cert, message_1, signature_1)

    # Ensure the data from the transaction matches the data the first
    # user uploaded
    assert message_1 == data_1

    # Get the second user's transaction
    user_2_tx = ident_service.get_transaction(user_2_tx_hash)
    key_2 = ident_service.get_key(user_2_tx_hash)

    # Ensure the second user's transaction points to the first user's
    # transaction
    assert user_2_tx.hash_pointer == user_1_tx_hash

    # Decrypt the second user's transaction
    decrypted_2 = decrypt(key_2, user_2_tx.action.get_data())
    message_2, signature_2 = split_data(decrypted_2)

    # Validate the second user's data
    verify(user_2_cert, message_2, signature_2)

    # Ensure the data from the transaction matches the data that the
    # second user uploaded to the identity service
    assert message_2 == data_2

    # Ensure the latest transaction hash for the first and second
    # user match what we expect.
    assert ident_service.latest_tx_list[user_1_cert_string] == user_1_tx_hash
    assert ident_service.latest_tx_list[user_2_cert_string] == user_2_tx_hash
def test_add_share_edit(ident_service):
    # Create the initial user data
    data = json.dumps({'first_name': 'Bob', 'last_name': 'Smith'}).encode()
    signature = sign(user_1_private_key, data)
    user_data = UserData(data, signature)

    # Add the user data to the service
    initial_tx_hash = ident_service.add_user_data(user_data,
                                                  user_1_cert_string)

    # Create data to share with the service provider
    shared_data = json.dumps({'first_name': 'Bob'}).encode()
    signature = sign(user_1_private_key, shared_data)
    shared_user_data = UserData(shared_data, signature)

    # Add the shared data to the identity service
    shared_tx_hash = ident_service.share_data(shared_user_data,
                                              user_1_cert_string, sp_1_cert)

    # Ensure the head of the chain has been updated
    assert ident_service.last_transaction_hash == shared_tx_hash

    # Create updated user data
    data_2 = json.dumps({
        'first_name': 'Robert',
        'last_name': 'Smith',
        'age': 35
    }).encode()
    signature_2 = sign(user_1_private_key, data_2)
    updated_user_data = UserData(data_2, signature_2)

    # Update the user's data on the identity service
    updated_tx_hash = ident_service.add_user_data(updated_user_data,
                                                  user_1_cert_string)

    # Ensure the user's latest tx hash has been updated
    assert updated_tx_hash == ident_service.latest_tx_list[user_1_cert_string]

    # Ensure the head of the chain has been updated
    assert updated_tx_hash == ident_service.last_transaction_hash

    # Get the updated data transaction
    updated_tx = ident_service.get_transaction(updated_tx_hash)

    # Ensure the transaction points to the previous head of the chain
    assert updated_tx.hash_pointer == shared_tx_hash

    # Get the share transaction
    share_tx = ident_service.get_transaction(shared_tx_hash)

    # Ensure the share transaction points to the initial user data transaction
    assert initial_tx_hash == share_tx.hash_pointer

    # As the service provider, get the message and signature from the
    # transaction
    encrypted_encryption_key, encrypted_data = split_data(
        share_tx.action.get_data())
    decrypted_encryption_key = decrypt_private(sp_1_private_key,
                                               encrypted_encryption_key)
    share_decrypted = decrypt(decrypted_encryption_key, encrypted_data)
    share_message, share_signature = split_data(share_decrypted)

    # Ensure the data is valid
    verify(user_1_cert, share_message, share_signature)

    # Ensure the data matches what the user uploaded at the time of
    # creating the share transaction and not the new user data.
    assert share_message == shared_data
class SITTemplate(object):

    def __init__(self, configs_directory='configs'):
        configs = SITHelper(configs_directory).get_configs('troposphere')
        self.TEMPLATE_DESCRIPTION = configs["template_description"]
        self.INSTANCE_TYPE = configs["instance_type"]
        self.SECURITY_GROUPS = configs["security_groups"]
        self.KEY_NAME = configs["key_name"]
        self.TAG_KEY = configs["tag_key"]
        self.TAG_VALUE = configs["tag_value"]
        self.AMI_ID = configs['ami_id']
        self.EBS_VOLUME_SIZE = configs['ebs_volume_size']
        self.EBS_DEVICE_NAME = configs['ebs_device_name']
        self.MAX_SIZE = configs['max_size']
        self.MIN_SIZE = configs['min_size']
        self.SUBNET = configs['subnet']
        self.CLUSTER_NAME = configs['cluster_name']
        self.AUTOSCALING_GROUP_NAME = configs['autoscaling_group_name']
        self.LAUNCH_CONFIGURATION_NAME = configs['launch_configuration_name']
        self.SCALING_METRIC = configs['scaling_metric']
        self.SCALE_UP_THRESHOLD = configs['scale_up_threshold']
        self.SCALE_DOWN_THRESHOLD = configs['scale_down_threshold']
        self.ECS_TASK_CLEANUP_WAIT = configs['ecs_task_cleanup_wait_duration']
        self.template = Template()
        self.user_data = UserData(configs_directory)
        self.init_template()

    def print_template(self):
        print self.template.to_json()

    def init_template(self):
        self.template.add_description(self.TEMPLATE_DESCRIPTION)
        ecs_cluster = self.template.add_resource(Cluster(
            self.CLUSTER_NAME
        ))
        ecs_instance_role = self.template.add_resource(Role(
            'sitInstanceRole',
            Path='/',
            AssumeRolePolicyDocument={
                "Statement": [{
                    "Effect": "Allow",
                    "Principal": {
                        "Service": ["ec2.amazonaws.com"]
                    },
                    "Action": ["sts:AssumeRole"]
                }]
            }
        ))
        ecs_instance_profile = self.template.add_resource(InstanceProfile(
            'sitInstanceProfile',
            Path='/',
            Roles=[Ref(ecs_instance_role)]
        ))
        ecs_instance_policy = self.template.add_resource(PolicyType(
            'sitInstancePolicy',
            PolicyName='ecs-policy',
            Roles=[Ref(ecs_instance_role)],
            PolicyDocument={
                "Statement": [{
                    "Effect": "Allow",
                    "Action": [
                        "ecs:CreateCluster",
                        "ecs:RegisterContainerInstance",
                        "ecs:DeregisterContainerInstance",
                        "ecs:DiscoverPollEndpoint",
                        "ecs:Submit*",
                        "ecs:Poll",
                        "ecs:StartTelemetrySession",
                        "ecr:GetAuthorizationToken",
                        "ecr:BatchCheckLayerAvailability",
                        "ecr:GetDownloadUrlForLayer",
                        "ecr:BatchGetImage",
                        "logs:CreateLogStream",
                        "logs:PutLogEvents"
                    ],
                    "Resource": "*"
                }],
            }
        ))
        commands = {
            '01_add_instance_to_cluster': {
                'command': Join('', [
                    '#!/bin/bash\n',
                    'echo ECS_CLUSTER=', Ref(ecs_cluster),
                    '$"\n"ECS_ENGINE_TASK_CLEANUP_WAIT_DURATION=',
                    self.ECS_TASK_CLEANUP_WAIT,
                    ' >> /etc/ecs/ecs.config'
                ])
            }
        }
        files = {
            "/etc/cfn/cfn-hup.conf": {
                "content": Join("", [
                    "[main]\n",
                    "stack=", Ref("AWS::StackId"), "\n",
                    "region=", Ref("AWS::Region"), "\n"
                ]),
                "mode": "000400",
                "owner": "root",
                "group": "root"
            },
            "/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
                "content": Join("", [
                    "[cfn-auto-reloader-hook]\n",
                    "triggers=post.update\n",
                    "path=Resources.{0}.Metadata.AWS::CloudFormation::Init\n".format(
                        self.LAUNCH_CONFIGURATION_NAME),
                    "action=/opt/aws/bin/cfn-init -v ",
                    " --stack ", Ref("AWS::StackName"),
                    " --resource {0}".format(self.LAUNCH_CONFIGURATION_NAME),
                    " --region ", Ref("AWS::Region"), "\n",
                    "runas=root\n"
                ])
            }
        }
        services = {
            "sysvinit": {
                "cfn-hup": {
                    "enabled": "true",
                    "ensureRunning": "true",
                    "files": [
                        "/etc/cfn/cfn-hup.conf",
                        "/etc/cfn/hooks.d/cfn-auto-reloader.conf"
                    ]
                }
            }
        }
        launch_configuration = self.template.add_resource(LaunchConfiguration(
            self.LAUNCH_CONFIGURATION_NAME,
            ImageId=self.AMI_ID,
            IamInstanceProfile=Ref(ecs_instance_profile),
            InstanceType=self.INSTANCE_TYPE,
            UserData=self.user_data.get_base64_data(),
            AssociatePublicIpAddress=False,
            SecurityGroups=self.SECURITY_GROUPS,
            KeyName=self.KEY_NAME,
            Metadata=autoscaling.Metadata(
                cloudformation.Init({
                    "config": cloudformation.InitConfig(
                        commands=commands,
                        files=files,
                        services=services
                    )
                })
            ),
            BlockDeviceMappings=[autoscaling.BlockDeviceMapping(
                DeviceName=self.EBS_DEVICE_NAME,
                Ebs=autoscaling.EBSBlockDevice(
                    DeleteOnTermination=True,
                    VolumeSize=self.EBS_VOLUME_SIZE,
                    VolumeType='gp2'
                )
            )]
        ))
        auto_scaling_group = self.template.add_resource(AutoScalingGroup(
            self.AUTOSCALING_GROUP_NAME,
            MaxSize=self.MAX_SIZE,
            MinSize=self.MIN_SIZE,
            LaunchConfigurationName=Ref(launch_configuration),
            VPCZoneIdentifier=[self.SUBNET]
        ))

        """ Scale UP Policy """
        scaling_up_policy = self.template.add_resource(ScalingPolicy(
            '{0}ScaleUpPolicy'.format(self.AUTOSCALING_GROUP_NAME),
            AdjustmentType='ChangeInCapacity',
            AutoScalingGroupName=Ref(auto_scaling_group),
            Cooldown=60,
            ScalingAdjustment='1'
        ))
        """ Cloud Watch Alarm """
        self.template.add_resource(Alarm(
            '{0}ScaleUpAlarm'.format(self.AUTOSCALING_GROUP_NAME),
            ActionsEnabled=True,
            Namespace='AWS/ECS',
            MetricName=self.SCALING_METRIC,
            ComparisonOperator='GreaterThanOrEqualToThreshold',
            Threshold=self.SCALE_UP_THRESHOLD,
            EvaluationPeriods=1,
            Statistic='Average',
            Period=60,
            AlarmActions=[Ref(scaling_up_policy)],
            Dimensions=[
                MetricDimension(
                    Name='ClusterName',
                    Value=Ref(ecs_cluster)
                )
            ]
        ))

        """ Scale DOWN Policy """
        scaling_down_policy = self.template.add_resource(ScalingPolicy(
            '{0}ScaleDownPolicy'.format(self.AUTOSCALING_GROUP_NAME),
            AdjustmentType='ChangeInCapacity',
            AutoScalingGroupName=Ref(auto_scaling_group),
            Cooldown=60,
            ScalingAdjustment='-1'
        ))
        """ Cloud Watch Alarm """
        self.template.add_resource(Alarm(
            '{0}ScaleDownAlarm'.format(self.AUTOSCALING_GROUP_NAME),
            ActionsEnabled=True,
            Namespace='AWS/ECS',
            MetricName=self.SCALING_METRIC,
            ComparisonOperator='LessThanOrEqualToThreshold',
            Threshold=self.SCALE_DOWN_THRESHOLD,
            EvaluationPeriods=1,
            Statistic='Average',
            Period=300,
            AlarmActions=[Ref(scaling_down_policy)],
            Dimensions=[
                MetricDimension(
                    Name='ClusterName',
                    Value=Ref(ecs_cluster)
                )
            ]
        ))
class SITTemplate(object):

    def __init__(self, configs_directory='configs'):
        configs = SITHelper(configs_directory).get_configs('troposphere')
        self.TEMPLATE_DESCRIPTION = configs["template_description"]
        self.INSTANCE_TYPE = configs["instance_type"]
        self.SECURITY_GROUPS = configs["security_groups"]
        self.KEY_NAME = configs["key_name"]
        self.TAG_KEY = configs["tag_key"]
        self.TAG_VALUE = configs["tag_value"]
        self.AMI_ID = configs['ami_id']
        self.EBS_VOLUME_SIZE = configs['ebs_volume_size']
        self.EBS_DEVICE_NAME = configs['ebs_device_name']
        self.MAX_SIZE = configs['max_size']
        self.MIN_SIZE = configs['min_size']
        self.SUBNET = configs['subnet']
        self.CLUSTER_NAME = configs['cluster_name']
        self.AUTOSCALING_GROUP_NAME = configs['autoscaling_group_name']
        self.LAUNCH_CONFIGURATION_NAME = configs['launch_configuration_name']
        self.AUTOSCALE_UP_ALARMS = configs['autoscale_up_alarms']
        self.AUTOSCALE_DOWN_ALARMS = configs['autoscale_down_alarms']
        self.ECS_TASK_CLEANUP_WAIT = configs['ecs_task_cleanup_wait_duration']
        self.template = Template()
        self.user_data = UserData(configs_directory)
        self.init_template()

    def print_template(self):
        print self.template.to_json()

    def init_template(self):
        self.template.add_description(self.TEMPLATE_DESCRIPTION)
        ecs_cluster = self.template.add_resource(Cluster(self.CLUSTER_NAME))
        ecs_instance_role = self.template.add_resource(
            Role('sitInstanceRole',
                 Path='/',
                 AssumeRolePolicyDocument={
                     "Statement": [{
                         "Effect": "Allow",
                         "Principal": {
                             "Service": ["ec2.amazonaws.com"]
                         },
                         "Action": ["sts:AssumeRole"]
                     }]
                 }))
        ecs_instance_profile = self.template.add_resource(
            InstanceProfile('sitInstanceProfile',
                            Path='/',
                            Roles=[Ref(ecs_instance_role)]))
        ecs_instance_policy = self.template.add_resource(
            PolicyType('sitInstancePolicy',
                       PolicyName='ecs-policy',
                       Roles=[Ref(ecs_instance_role)],
                       PolicyDocument={
                           "Statement": [{
                               "Effect": "Allow",
                               "Action": [
                                   "ecs:CreateCluster",
                                   "ecs:RegisterContainerInstance",
                                   "ecs:DeregisterContainerInstance",
                                   "ecs:DiscoverPollEndpoint",
                                   "ecs:Submit*",
                                   "ecs:Poll",
                                   "ecs:StartTelemetrySession",
                                   "ecr:GetAuthorizationToken",
                                   "ecr:BatchCheckLayerAvailability",
                                   "ecr:GetDownloadUrlForLayer",
                                   "ecr:BatchGetImage",
                                   "logs:CreateLogStream",
                                   "logs:PutLogEvents"
                               ],
                               "Resource": "*"
                           }],
                       }))
        commands = {
            '01_add_instance_to_cluster': {
                'command': Join('', [
                    '#!/bin/bash\n',
                    'echo ECS_CLUSTER=', Ref(ecs_cluster),
                    '$"\n"ECS_ENGINE_TASK_CLEANUP_WAIT_DURATION=',
                    self.ECS_TASK_CLEANUP_WAIT,
                    ' >> /etc/ecs/ecs.config'
                ])
            }
        }
        files = {
            "/etc/cfn/cfn-hup.conf": {
                "content": Join("", [
                    "[main]\n",
                    "stack=", Ref("AWS::StackId"), "\n",
                    "region=", Ref("AWS::Region"), "\n"
                ]),
                "mode": "000400",
                "owner": "root",
                "group": "root"
            },
            "/etc/cfn/hooks.d/cfn-auto-reloader.conf": {
                "content": Join("", [
                    "[cfn-auto-reloader-hook]\n",
                    "triggers=post.update\n",
                    "path=Resources.{0}.Metadata.AWS::CloudFormation::Init\n".format(
                        self.LAUNCH_CONFIGURATION_NAME),
                    "action=/opt/aws/bin/cfn-init -v ",
                    " --stack ", Ref("AWS::StackName"),
                    " --resource {0}".format(self.LAUNCH_CONFIGURATION_NAME),
                    " --region ", Ref("AWS::Region"), "\n",
                    "runas=root\n"
                ])
            }
        }
        services = {
            "sysvinit": {
                "cfn-hup": {
                    "enabled": "true",
                    "ensureRunning": "true",
                    "files": [
                        "/etc/cfn/cfn-hup.conf",
                        "/etc/cfn/hooks.d/cfn-auto-reloader.conf"
                    ]
                }
            }
        }
        launch_configuration = self.template.add_resource(
            LaunchConfiguration(self.LAUNCH_CONFIGURATION_NAME,
                                ImageId=self.AMI_ID,
                                IamInstanceProfile=Ref(ecs_instance_profile),
                                InstanceType=self.INSTANCE_TYPE,
                                UserData=self.user_data.get_base64_data(),
                                AssociatePublicIpAddress=False,
                                SecurityGroups=self.SECURITY_GROUPS,
                                KeyName=self.KEY_NAME,
                                Metadata=autoscaling.Metadata(
                                    cloudformation.Init({
                                        "config": cloudformation.InitConfig(
                                            commands=commands,
                                            files=files,
                                            services=services)
                                    })),
                                BlockDeviceMappings=[
                                    autoscaling.BlockDeviceMapping(
                                        DeviceName=self.EBS_DEVICE_NAME,
                                        Ebs=autoscaling.EBSBlockDevice(
                                            DeleteOnTermination=True,
                                            VolumeSize=self.EBS_VOLUME_SIZE,
                                            VolumeType='gp2'))
                                ]))
        auto_scaling_group = self.template.add_resource(
            AutoScalingGroup(self.AUTOSCALING_GROUP_NAME,
                             MaxSize=self.MAX_SIZE,
                             MinSize=self.MIN_SIZE,
                             Cooldown=60,
                             LaunchConfigurationName=Ref(launch_configuration),
                             VPCZoneIdentifier=[self.SUBNET]))

        """ Scale UP Policy """
        scaling_up_policy = self.template.add_resource(
            ScalingPolicy('{0}ScaleUpPolicy'.format(
                self.AUTOSCALING_GROUP_NAME),
                          AdjustmentType='ChangeInCapacity',
                          AutoScalingGroupName=Ref(auto_scaling_group),
                          Cooldown=60,
                          ScalingAdjustment='1'))
        for alarm_name, alarm in self.AUTOSCALE_UP_ALARMS.iteritems():
            """ Cloud Watch Alarm """
            self.template.add_resource(
                Alarm('{0}ScaleUp{1}'.format(self.AUTOSCALING_GROUP_NAME,
                                             alarm_name),
                      ActionsEnabled=True,
                      Namespace='AWS/ECS',
                      MetricName=alarm['scaling_metric'],
                      ComparisonOperator='GreaterThanOrEqualToThreshold',
                      Threshold=alarm['scale_up_threshold'],
                      EvaluationPeriods=1,
                      Statistic=alarm['statistic'],
                      Period=alarm['period'],
                      AlarmActions=[Ref(scaling_up_policy)],
                      Dimensions=[
                          MetricDimension(Name='ClusterName',
                                          Value=Ref(ecs_cluster))
                      ]))

        """ Scale DOWN Policy """
        scaling_down_policy = self.template.add_resource(
            ScalingPolicy('{0}ScaleDownPolicy'.format(
                self.AUTOSCALING_GROUP_NAME),
                          AdjustmentType='ChangeInCapacity',
                          AutoScalingGroupName=Ref(auto_scaling_group),
                          Cooldown=60,
                          ScalingAdjustment='-1'))
        for alarm_name, alarm in self.AUTOSCALE_DOWN_ALARMS.iteritems():
            """ Cloud Watch Alarm """
            self.template.add_resource(
                Alarm('{0}ScaleDown{1}'.format(self.AUTOSCALING_GROUP_NAME,
                                               alarm_name),
                      ActionsEnabled=True,
                      Namespace='AWS/ECS',
                      MetricName=alarm['scaling_metric'],
                      ComparisonOperator='LessThanOrEqualToThreshold',
                      Threshold=alarm['scale_down_threshold'],
                      EvaluationPeriods=1,
                      Statistic=alarm['statistic'],
                      Period=alarm['period'],
                      AlarmActions=[Ref(scaling_down_policy)],
                      Dimensions=[
                          MetricDimension(Name='ClusterName',
                                          Value=Ref(ecs_cluster))
                      ]))
def singleUserData(clientId):
    userData = UserData('/tmp/clientmetricsdb')
    return userData.getSingleUserData(clientId)
def allUsersSingleMetricData(metric):
    userData = UserData('/tmp/clientmetricsdb')
    return userData.getAllUsersSingleMetricData(metric)
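# The three wrappers above each hard-code the '/tmp/clientmetricsdb' path.
# A minimal sketch of factoring the path into one constant so it is defined
# in a single place; `CLIENT_METRICS_DB` is a hypothetical name, not part of
# the original module.
CLIENT_METRICS_DB = '/tmp/clientmetricsdb'


def allUsersSingleMetricData(metric):
    userData = UserData(CLIENT_METRICS_DB)
    return userData.getAllUsersSingleMetricData(metric)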