Example #1
    async def mirror_msg_new(self, message):
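        # Mirror a newly posted message into every channel configured as a
        # mirror of its source channel, reusing any downloaded attachment.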
        if message.author.id == self.bot.user.id or message.channel.is_private:
            return

        channel = message.channel
        mirrored_channels = self.settings.get_mirrored_channels(channel.id)

        if not mirrored_channels:
            return

        # attribute the post when the speaker changed or when they last spoke
        # more than ATTRIBUTION_TIME_SECONDS ago
        last_spoke, last_spoke_timestamp = self.settings.get_last_spoke(channel.id)
        now_time = datetime.utcnow()
        last_spoke_time = datetime.utcfromtimestamp(
            last_spoke_timestamp) if last_spoke_timestamp else now_time
        attribution_required = last_spoke != message.author.id
        attribution_required |= (
            now_time - last_spoke_time).total_seconds() > ATTRIBUTION_TIME_SECONDS
        self.settings.set_last_spoke(channel.id, message.author.id)

        attachment_bytes = None
        if message.attachments:
            # If we know we're copying a message and that message has an attachment,
            # pre download it and reuse it for every upload.
            attachment = message.attachments[0]
            if 'url' in attachment and 'filename' in attachment:
                url = attachment['url']
                filename = attachment['filename']
                async with aiohttp.ClientSession() as session:
                    async with session.get(url) as response:
                        attachment_bytes = io.BytesIO(await response.read())

        for dest_channel_id in mirrored_channels:
            try:
                dest_channel = self.bot.get_channel(dest_channel_id)
                if not dest_channel:
                    continue

                if attribution_required:
                    msg = 'Posted by **{}** in *{} - #{}*:'.format(message.author.name,
                                                                   message.server.name,
                                                                   message.channel.name)
                    await self.bot.send_message(dest_channel, msg)

                if attachment_bytes:
                    dest_message = await self.bot.send_file(dest_channel, attachment_bytes, filename=filename, content=message.content)
                    attachment_bytes.seek(0)
                elif message.content:
                    dest_message = await self.bot.send_message(dest_channel, message.content)
                else:
                    print('Failed to mirror message from', channel.id, ': no action to take')
                    continue

                self.settings.add_mirrored_message(
                    channel.id, message.id, dest_channel.id, dest_message.id)
            except Exception as ex:
                print('Failed to mirror message from ', channel.id, 'to', dest_channel_id, ':', ex)
                traceback.print_exc()

        if attachment_bytes:
            attachment_bytes.close()
Example #2
async def stockprofile(ctx, *, arg):
    symbol = arg.upper()
    current = requests.get("https://finnhub.io/api/v1/quote?symbol=" +
                           symbol + "&token=bto4nln48v6v7atimad0")
    quote = current.json()  # parse the JSON payload once instead of once per field

    displaymsg = (
        f"Current Price: {symbol} is ${quote['c']}\n"
        f"High Price: {symbol} was ${quote['h']}\n"
        f"Low Price: {symbol} was ${quote['l']}\n"
        f"Open Price: {symbol} is ${quote['o']}\n"
        f"Previous Closing Price: {symbol} was ${quote['pc']}\n"
        f"{symbol}'s Time Stamp (Universal Time): "
        + datetime.utcfromtimestamp(quote['t']).strftime('%Y-%m-%d %H:%M:%S') + "\n"
    )

    await ctx.send(displaymsg)
Example #3
def parse_balances(table):
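    # For each stock, walk the deals newest-first and keep only those needed
    # to cover the current balance; the oldest kept deal is trimmed so the
    # amounts sum to the balance exactly, then chronological order is restored.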
    for stock_name, stock_info in table.items():
        new_list = list()
        date_list = list(stock_info["deals"].values())
        date_list.reverse()
        balance = stock_info["balance"]
        sum_balance = 0
        for i in range(len(date_list)):
            if sum_balance >= balance:
                break
            sum_balance += date_list[i][1]
            new_list.append([
                datetime.utcfromtimestamp(
                    date_list[i][0]).strftime("%d.%m.%Y"), date_list[i][1],
                date_list[i][2]
            ])
        if new_list:
            new_list[-1][1] -= abs(sum_balance - balance)
        new_list.reverse()
        stock_info["deals"] = new_list
Example #4
    def read_to(self, dataset, input_filepath, configs, appending):
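        # Read one lidar measurement file into the netCDF `dataset`: create
        # all dimensions and variables on the first call (appending=False),
        # or extend the unlimited time dimension on later calls.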

        # read required parameters from config (cast the gate count once so
        # later slicing and stride arithmetic work on a plain int)
        nr_gates = int(configs['parameters']['n_gates'])
        range_gates = configs['parameters']['range_gates']

        # for every file to read open and process it
        with open(input_filepath, 'r') as file:
            # read in header
            header = self.read_header(file)

            # skip next lines
            line = file.readline()
            while '****' not in line:
                line = file.readline()

            # read rest of file containing measurement data
            raw_file = file.readlines()
        # file closed

        # check variables in header
        assert float(nr_gates) == header['Number of gates']
        assert float(range_gates) == header['Range gate length (m)']

        # split the info line for every ray from the rest of the data
        # measured_info contains timestamp, azimut, elevation, pitch and roll
        # i.e. 20.084231  39.81  -0.00 -0.20 0.10
        # all lines n*n_gates with 0 <= n < number of rays
        measured_info_s = raw_file[::int(nr_gates) + 1]

        # convert the measured info from list of str to numpy array
        measured_info = np.empty((len(measured_info_s), 5))
        for i, line in enumerate(measured_info_s):
            measured_info[i, :] = np.fromstring(line, dtype='f4', sep=' ')

        # convert the time stamp from decimal hour of day to epoch double format
        header_day = header['Start time'].split(' ')[0]
        measured_info[:, 0] = decimaltime2sec(measured_info[:, 0], header_day)

        # the rest is the actual data with range gate number, doppler,
        # intensity and backscatter
        # i.e.   0 -0.2173 1.135933  7.655162E-6
        #        1 -0.2173 1.127027  7.154400E-6
        del raw_file[::nr_gates + 1]
        measured_data_s = [x.lstrip().rstrip() for x in raw_file]

        # convert measured data from list of str to numpy array
        measured_data = np.empty((len(measured_data_s), 4))
        for i, line in enumerate(measured_data_s):
            measured_data[i, :4] = np.fromstring(line, dtype='f4', sep=' ')

        # Dimensions
        n_rays = int(measured_info.shape[0])
        n_gates = int(header['Number of gates'])

        # Initialize the data set if not appending to existing data
        if not appending:
            # create the dimensions
            dataset.createDimension('range', header['Number of gates'])
            # the time dimension must be without limits (None) to append later
            dataset.createDimension('time', None)

            # create the coordinate variables

            # range
            # see header of measurement file
            # Center of gate = (range gate + 0.5) * Gate length
            gate_length = header['Range gate length (m)']
            _range_dist = (measured_data[0:nr_gates, 0] + 0.5) * gate_length
            range_dist = dataset.createVariable('range', 'f4', ('range', ))
            range_dist.units = 'm'
            range_dist.long_name = 'range_gate_distance_from_lidar'
            range_dist[:] = _range_dist
            range_dist.comment = 'distance to center of probe volume'

            # time
            # get start time for storing the campaign start (first measurement)
            # timestamp in comment
            start_time = datetime.utcfromtimestamp(
                measured_info[0, 0]).isoformat() + 'Z'
            # timestamps are stored as seconds since campaign start
            _time = measured_info[:, 0] - measured_info[0, 0]
            time = dataset.createVariable('time', 'f4', ('time', ))
            time.units = 's'
            time.long_name = 'time stamp'
            time[:] = _time
            time.comment = 'seconds since campaign start at ' + start_time

            # create the data variables
            # TODO: get the scan type from data
            scan_type = dataset.createVariable('scan_type', 'i', 'time')
            scan_type.units = 'none'
            scan_type.long_name = 'scan_type_of_the_measurement'
            scan_type[:] = np.ones(
                (n_rays, 1)) * get_scan_type(header['Filename'])

            # TODO: define scan ID
            scan_id = dataset.createVariable('scan_id', 'i', 'time')
            scan_id.units = 'none'
            scan_id.long_name = 'scan_id_of_the_measurement'

            # scan cycle
            scan_cycle = dataset.createVariable('scan_cycle', 'i', 'time')
            scan_cycle.units = 'none'
            scan_cycle.long_name = 'scan_cycle_number'
            scan_cycle[:] = np.ones((n_rays, 1))

            # create the beam steering variables
            # azimuth
            _azimuth = measured_info[:, 1]
            azimuth_angle = dataset.createVariable('azimuth_angle', 'f4',
                                                   'time')
            azimuth_angle.units = 'degrees'
            azimuth_angle.long_name = 'azimuth_angle_of_lidar_beam'
            azimuth_angle[:] = _azimuth
            azimuth_angle.comment = 'clock-wise angle from north'
            azimuth_angle.accuracy = ''
            azimuth_angle.accuracy_info = 'max resolution 0.00072 degrees'

            # elevation
            _elevation = measured_info[:, 2]
            elevation_angle = dataset.createVariable('elevation_angle', 'f4',
                                                     'time')
            elevation_angle.units = 'degrees'
            elevation_angle.long_name = 'elevation_angle_of_lidar_beam'
            elevation_angle[:] = _elevation
            elevation_angle.comment = 'upwards angle from horizontal'
            elevation_angle.accuracy = ''
            elevation_angle.accuracy_info = 'max resolution 0.00144 degrees'

            # yaw, pitch, roll
            # yaw is not available
            _yaw = np.zeros(measured_info[:, 3].shape)
            yaw = dataset.createVariable('yaw', 'f4', 'time')
            yaw.units = 'degrees'
            yaw.long_name = 'lidar_yaw_angle'
            yaw[:] = _yaw
            yaw.comment = 'The home position is configured in a way that 0 ' \
                          'azimuth corresponds to north.'
            yaw.accuracy = ''

            _pitch = measured_info[:, 3]
            pitch = dataset.createVariable('pitch', 'f4', 'time')
            pitch.units = 'degrees'
            pitch.long_name = 'lidar_pitch_angle'
            pitch[:] = _pitch
            pitch.comment = ''
            pitch.accuracy = ''
            pitch.accuracy_info = 'No information on pitch accuracy available.'

            _roll = measured_info[:, 4]
            roll = dataset.createVariable('roll', 'f4', 'time')
            roll.units = 'degrees'
            roll.long_name = 'lidar_roll_angle'
            roll[:] = _roll
            roll.comment = ''
            roll.accuracy = ''
            roll.accuracy_info = 'No information on roll accuracy available.'

            # measurement variables

            # Doppler velocity
            DOPPLER = dataset.createVariable('VEL', 'f4', ('time', 'range'))
            DOPPLER.units = 'm.s-1'
            DOPPLER.long_name = 'doppler'
            DOPPLER[:, :] = measured_data[:, 1].reshape(n_rays, n_gates)

            INTENSITY = dataset.createVariable('INTENSITY', 'f4',
                                               ('time', 'range'))
            INTENSITY.units = ''
            INTENSITY.long_name = 'intensity'
            INTENSITY.comment = 'snr + 1'
            INTENSITY[:] = measured_data[:, 2].reshape(n_rays, n_gates)

            BACKSCATTER = dataset.createVariable('BACKSCATTER', 'f4',
                                                 ('time', 'range'))
            BACKSCATTER.units = 'm-1.s-1'
            BACKSCATTER.long_name = 'backscatter'
            BACKSCATTER[:] = measured_data[:, 3].reshape(n_rays, n_gates)

        else:
            # get current number of stored measurements
            n_times = len(dataset.dimensions['time'])

            # time
            # get campaign start time
            _start_time = dataset.variables['time'].comment
            start_time = datetime.strptime(_start_time[-27:],
                                           "%Y-%m-%dT%H:%M:%S.%fZ")
            # the parsed campaign start is UTC; a naive .timestamp() would add
            # the local offset (requires `from datetime import timezone`)
            _time = measured_info[:, 0] - start_time.replace(
                tzinfo=timezone.utc).timestamp()
            dataset.variables['time'][n_times:] = _time

            # scan type
            _scan_type = np.ones(
                (n_rays, 1)) * get_scan_type(header['Filename'])
            dataset.variables['scan_type'][n_times:] = _scan_type

            # scan cycle
            _last_scan_cycle = dataset.variables['scan_cycle'][n_times - 1]
            _scan_cycle = np.ones((n_rays, 1)) * (_last_scan_cycle + 1)
            dataset.variables['scan_cycle'][n_times:] = _scan_cycle

            # azimuth
            _azimuth = measured_info[:, 1]
            dataset.variables['azimuth_angle'][n_times:] = _azimuth

            # elevation
            _elevation = measured_info[:, 2]
            dataset.variables['elevation_angle'][n_times:] = _elevation

            # yaw is not available
            _yaw = np.zeros(measured_info[:, 3].shape)
            dataset.variables['yaw'][n_times:] = _yaw

            # pitch
            _pitch = measured_info[:, 3]
            dataset.variables['pitch'][n_times:] = _pitch

            # roll
            _roll = measured_info[:, 4]
            dataset.variables['roll'][n_times:] = _roll

            # doppler
            _doppler = measured_data[:, 1].reshape(n_rays, n_gates)
            dataset.variables['VEL'][n_times:] = _doppler

            # intensity
            _intensity = measured_data[:, 2].reshape(n_rays, n_gates)
            dataset.variables['INTENSITY'][n_times:] = _intensity

            # backscatter
            _backscatter = measured_data[:, 3].reshape(n_rays, n_gates)
            dataset.variables['BACKSCATTER'][n_times:] = _backscatter
Example #5
    def delete_object_after(self, containername, prefix, objectname, ttl):
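        # PUT a signed (AWS Signature Version 4) S3 lifecycle configuration on
        # `containername` that expires objects under `prefix` at the midnight
        # following `ttl`.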
        
        httpVerb = "PUT"
        algorithm = 'AWS4-HMAC-SHA256'
        service = "s3"
        aws_request = "aws4_request"
        aws_access_key_id = self.user
        aws_secret_access_key = self.passwd
        endpoint_url = self.url
        host = self.url.split("/")[-1]
        region = "us-east-1"

        # Create a date for headers and the credential string
        t = datetime.utcnow()
        amzdate = t.strftime('%Y%m%dT%H%M%SZ')
        datestamp = t.strftime('%Y%m%d')  # Date w/o time, used in credential scope

        # lifecycle expiration only supports midnight, so drop the time of day
        d1_str = datetime.utcfromtimestamp(ttl).strftime("%Y-%m-%d")

        d1 = datetime.strptime(d1_str, "%Y-%m-%d")  # convert back to datetime
        d2 = d1 + timedelta(days=1)  # round up to the next midnight
        _delete_at_iso = d2.strftime("%Y-%m-%dT%H:%M:%SZ")  # expiration timestamp for the rule

        
        # _prepare_xml builds the lifecycle-configuration XML body and its
        # MD5 / SHA-256 digests, which feed the signed headers below
        [body, content_md5, content_sha256] = self._prepare_xml(prefix, _delete_at_iso)

        canonical_uri = "/" + containername 

        canonical_headers = f"content-md5:{content_md5}\nhost:{host}\nx-amz-content-sha256:{content_sha256}\nx-amz-date:{amzdate}"
        
        signed_headers = "content-md5;host;x-amz-content-sha256;x-amz-date"

        credential_scope = datestamp + '/' + region + '/' + service + '/' + aws_request

        canonical_querystring = "lifecycle="

        headers = { "Content-MD5": content_md5, 
                    "Host": host,
                    "X-Amz-Content-Sha256": content_sha256,
                    "X-Amz-Date": amzdate}


        canonical_request = httpVerb + "\n" + canonical_uri + "\n" + canonical_querystring + "\n" + canonical_headers + '\n\n' + signed_headers + "\n" + content_sha256

        canonical_request_hash = hashlib.sha256(canonical_request.encode("utf-8")).hexdigest()

        string_to_sign = f"{algorithm}\n{amzdate}\n{credential_scope}\n{canonical_request_hash}"

        # Create the signing key
        signing_key = self.getSignatureKey(aws_secret_access_key, datestamp, region, service)

        # Sign the string_to_sign using the signing_key
        signature = hmac.new(signing_key, (string_to_sign).encode("utf-8"), hashlib.sha256).hexdigest()

        headers["Authorization"] = f"AWS4-HMAC-SHA256 Credential={aws_access_key_id}/{credential_scope}, SignedHeaders={signed_headers}, Signature={signature}"
    
        url = endpoint_url + canonical_uri +"?"+ canonical_querystring
       
        
        try:
            resp = requests.put(url, data=body, headers=headers)

            if resp.ok:
                logging.info(f"Object was marked as to be deleted at {_delete_at_iso}")

                return 0
            
            logging.error("Object couldn't be marked as delete-at")
            logging.error(resp.content)
            logging.error(resp.headers)
            return -1
        except Exception as e:
            logging.error(e)
            logging.error("Object couldn't be marked as delete-at")
            return -1
Example #6
from datetime import datetime as dt


def epoch_to_formatted_string(epoch, string_format):
    assert isinstance(epoch, int)
    return dt.utcfromtimestamp(epoch).strftime(string_format)
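
# A minimal usage sketch (the epoch below is 2021-01-01 00:00:00 UTC):
# >>> epoch_to_formatted_string(1609459200, '%Y-%m-%d %H:%M:%S')
# '2021-01-01 00:00:00'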
Example #7
async def timestamp(ctx):
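    # fetch the latest quote for a hardcoded symbol (AAPL) and report the
    # quote's UNIX timestamp as a UTC date-time string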
    current = requests.get(
        'https://finnhub.io/api/v1/quote?symbol=AAPL&token=bto4nln48v6v7atimad0'
    )
    await ctx.send(":clock3: The time stamp is " + datetime.utcfromtimestamp(
        current.json()['t']).strftime('%Y-%m-%d %H:%M:%S'))