Example No. 1
    def build(self):
        """Create daisy book."""
        print("Building DaisyBook...")
        self.folder_name = self.title + '_' + get_current_timestamp() + '/'
        create_dir(self.output_folder + self.folder_name)
        print("All files stored in ", self.output_folder + self.folder_name)

        self.elapsed_time = 0
        print("Processing Pages....")
        for i, page in enumerate(self.pages):
            print("----Generating [HTML] file; Pg {}".format(i))
            html_file = make_html(page, self.title, i,
                                  self.output_folder + self.folder_name,
                                  self.tag_config)
            print("----Generating [MP3] file; Pg {}".format(i))
            audio_lengths, audio_file = make_audio(
                page, self.title, i, self.output_folder + self.folder_name)
            print("----Generating [SMIL] file; Pg {}".format(i))
            smil_header = make_smil(page, i, audio_lengths, self.title,
                                    self.elapsed_time,
                                    self.output_folder + self.folder_name,
                                    self.tag_config)

            self.audio_files.append(audio_file)
            self.html_files.append(html_file)
            self.smil_headers.append(smil_header)

            self.elapsed_time += sum(audio_lengths)
        print("----Generating [NCC] file; Book: {}".format(self.title))
        self.make_ncc()
Example No. 2
    def get_exchange_rates_list_message(self):
        rates_path = os.path.join(os.environ['work_dir'], 'rates.json')
        data_loaded_from_file = utils.read_data_from_file_as_json(rates_path)

        expiration_time = 10 * 60 * 1000  # ten minutes, in milliseconds
        last_timestamp = int(data_loaded_from_file['timestamp'])
        current_timestamp = utils.get_current_timestamp()

        # Cached rates are still fresh: refresh the timestamp and reuse them.
        if current_timestamp - last_timestamp < expiration_time:
            data_loaded_from_file['timestamp'] = current_timestamp
            with open(rates_path, 'w') as f:
                utils.save_json_to_file(data_loaded_from_file, f)
            return utils.format_decimal_values(data_loaded_from_file['rates'])

        # Cache expired: fetch fresh rates before touching the file, so a
        # failed request does not truncate the existing cache.
        try:
            data_loaded_from_api = self._get_exchange_rates_response_for_usd()
        except BadRequestException:
            return 'Something went wrong.'

        rates = data_loaded_from_api['rates']
        with open(rates_path, 'w') as f:
            utils.save_json_to_file({
                'rates': rates,
                'timestamp': current_timestamp
            }, f)

        return utils.format_decimal_values(rates)
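Note: the ten-minute expiry arithmetic above only works if utils.get_current_timestamp() returns milliseconds. A minimal sketch of such a helper, assuming a Unix-epoch millisecond convention (the real utils module may differ):

import time

def get_current_timestamp() -> int:
    # Unix time in milliseconds, matching the 10 * 60 * 1000 expiry window.
    return int(time.time() * 1000)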
Example No. 3
    def __init__(self, scalename):
        super().__init__()
        # QtWidgets.QWidget.__init__(self)

        self.scalename = scalename
        with open('./config.json') as json_file:
            config = load(json_file)
        data_dir = config['data_directory']
        self.longitudinal_scales = config['longitudinal_scales']

        # this filename is ONLY for longitudinal scales
        timestamp = get_current_timestamp().replace('-', '').replace(':', '')
        self.results_fname = f'{data_dir}/longitudinal/{scalename}_{timestamp}.json'

        from os import makedirs
        from os.path import dirname
        makedirs(dirname(self.results_fname), exist_ok=True)

        widget = QtWidgets.QWidget()
        grid = QtWidgets.QGridLayout()
        widget.setLayout(grid)

        self.load_probes()
        # holds results (they get overwritten every time, so this could
        # also just be re-initialized on each save)
        self.payload = {}

        self.QProbes = {}  # for later accessing the data
        for i, (probe_var, probe_info) in enumerate(self.probes.items()):
            # add the label, which is the probe text question
            Label = QtWidgets.QLabel(probe_info['Description'])
            grid.addWidget(Label, i, 0, 1, 1, QtCore.Qt.AlignBottom)

            # add the response option, which varies depending on the response type
            if 'min' in probe_info['Levels']:
                # value counter
                self.QProbes[probe_var] = QtWidgets.QSpinBox()
                self.QProbes[probe_var].setRange(probe_info['Levels']['min'],
                                                 probe_info['Levels']['max'])
                self.QProbes[probe_var].setSingleStep(
                    probe_info['Levels']['step'])
            else:
                # dropdown menu
                self.QProbes[probe_var] = QtWidgets.QComboBox()
                # for level_val, level_name in probe_info['Levels'].items():
                self.QProbes[probe_var].addItems(probe_info['Levels'].values())
                self.QProbes[probe_var].currentIndexChanged.connect(
                    self.update_payload)
            grid.addWidget(self.QProbes[probe_var], i, 1)

        self.setWidget(widget)
        self.setWidgetResizable(True)

        # main window stuff
        # self.setMinimumWidth(widget.sizeHint().width())
        width, height = widget.sizeHint().width(), 500
        xloc, yloc = 500, 300
        self.setGeometry(xloc, yloc, width, height)
        self.setWindowTitle(scalename)
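Here (and again in Examples 8 and 9) the timestamp is a string whose '-' and ':' separators are stripped to build a filesystem-safe filename. A hedged sketch of a string-valued get_current_timestamp consistent with that usage (the project's actual format may differ):

from datetime import datetime

def get_current_timestamp() -> str:
    # e.g. '2021-03-04T10:15:30'; stripping '-' and ':' yields the
    # compact, filesystem-safe '20210304T101530'.
    return datetime.now().isoformat(timespec='seconds')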
Example No. 4
    def get_mining_template(self) -> Block:
        # Assemble a candidate block over the queued transactions,
        # stamped with the current time and chained to the current tip.
        hdr = BlockHeader(timestamp=get_current_timestamp(),
                          prev_hash=self.get_top_block().hash(),
                          target=int256_to_bytes(FIXED_TARGET),
                          nonce=0x00000000,
                          tx_hash=self.get_queued_tx_hash())
        return Block(header=hdr, txs=self.tx_queue)
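A hypothetical usage sketch of the template: grind the nonce until the block hash meets the fixed target. The node variable and the hash comparison are illustrative assumptions, not taken from the original code base:

block = node.get_mining_template()
while int.from_bytes(block.hash(), 'big') >= FIXED_TARGET:
    block.header.nonce += 1  # try the next nonce and re-hash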
Example No. 5
    def __init__(self):
        self.api_resource = HTTPSConnection(host='api.exchangeratesapi.io')
        rates_path = os.path.join(os.environ['work_dir'], 'rates.json')
        # Seed the on-disk cache on first run.
        if not os.path.exists(rates_path):
            data = self._get_exchange_rates_response_for_usd()
            with open(rates_path, 'w') as f:
                utils.save_json_to_file({
                    'rates': data['rates'],
                    'timestamp': utils.get_current_timestamp()
                }, f)
Example No. 6
def add_sensor_record(reading: float, sensor_type: str) -> None:
    """Adds one record to the sensor collection in the MongoDB database"""
    collection = db_utils.get_collection_object(
        collection_name=config.MONGODB_COLLECTION_SENSOR_DATA)
    ts = utils.get_current_timestamp()
    dt = utils.timestamp_to_datetime(timestamp_obj=ts)
    obj_to_add = {
        '_id': utils.generate_random_id(),
        'reading': reading,
        'timestamp': ts,
        'datetime': str(dt),
        'sensorType': sensor_type,
    }
    collection.insert_one(obj_to_add)
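A hypothetical call for context; the reading value and sensor type are illustrative:

add_sensor_record(reading=23.5, sensor_type='temperature')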
Example No. 7
    def save_session_info(self):
        ses_payload = {'acq_time': get_current_timestamp()}
        for label, lineedit in zip(self.setupLabels, self.setupLEdits):
            response = lineedit.text()
            if response:
                # sleep aids can be separated by commas
                if ',' in response and label.text() in ['sleep_aids', 'LDIMs']:
                    response = [r.strip() for r in response.split(',')]
                ses_payload[label.text()] = response
        with open(self.session_fname, 'w') as json_file:
            dump(ses_payload,
                 json_file,
                 sort_keys=True,
                 indent=4,
                 ensure_ascii=False)
        print(f'Saved {self.session_fname}')
Example No. 8
    def __init__(self, session_dir, parent=None):
        super().__init__(parent)

        with open('./config.json') as json_file:
            config = load(json_file)
        # data_dir = config['data_directory']
        self.available_scales = config['arousal_scales']

        # generates arousal id
        # self.init_new_arousal()

        # for creating/saving
        timestamp = get_current_timestamp().replace('-', '').replace(':', '')
        self.dream_fname = f'{session_dir}/arousal-{timestamp}.json'

        self.init_CentralWidget()
Example No. 9
    def init_new_session(self):
        """Append the session file with a new session and timestamp it.
        And add an *empty* arousal.tsv file.
        """
        if not self.sender().isChecked():
            self._sessionRunning = False
        else:
            timestamp = get_current_timestamp()
            ts4fname = timestamp.replace('-', '').replace(':', '')
            self.session_dir = f'{self.data_dir}/session-{ts4fname}'
            self.session_fname = f'{self.session_dir}/session_info.json'

            os.mkdir(self.session_dir)
            self.save_session_info()

            self._sessionRunning = True
Example No. 10
def get_upcoming_events():
    """
    Get events with a start time in the future, sorted oldest first.
    """
    redis_instance = app.extensions['redis']['REDIS']

    now_seconds = get_current_timestamp()

    upcoming_events = redis_instance.zrangebyscore(app.config['REDIS_KEY_EVENT_START'], now_seconds, '+inf')

    # Setup Redis transaction
    pipe = redis_instance.pipeline()

    # Add commands to transaction
    for short_name in upcoming_events:
        pipe.hgetall(app.config['REDIS_KEY_EVENT_INFO'] % short_name)

    # Execute the transaction and return the event info hashes
    return pipe.execute()
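Examples 10, 12, and 13 all assume the same Redis layout: event start and end times stored as sorted-set scores keyed by the event's short name, plus one info hash per event. A hedged sketch of the write side under those assumptions; register_event is hypothetical, and the mapping-style calls assume redis-py 3.x:

def register_event(short_name, start_ts, end_ts, info):
    redis_instance = app.extensions['redis']['REDIS']
    pipe = redis_instance.pipeline()
    # Start/end Unix timestamps become sorted-set scores, so
    # zrangebyscore can slice events by time.
    pipe.zadd(app.config['REDIS_KEY_EVENT_START'], {short_name: start_ts})
    pipe.zadd(app.config['REDIS_KEY_EVENT_END'], {short_name: end_ts})
    # Per the cleanup_events() docstring, the real code also relies on
    # key expiry for these info hashes.
    pipe.hset(app.config['REDIS_KEY_EVENT_INFO'] % short_name, mapping=info)
    pipe.execute()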
Example No. 11
    def save_dream_json(self):

        # initialize the payload empty because we only add fields that were completed
        payload = {'acq_time': get_current_timestamp()}

        # get arousal type
        arousal_type = self.aroTypeLEdit.text()
        if len(arousal_type) > 0:
            payload['arousal_type'] = arousal_type

        # get text dream report
        dream_report = self.reportText.toPlainText()
        if len(dream_report) > 0:
            payload['report'] = dream_report

        # get text memory sources report
        memsrc_report = self.memsrcText.toPlainText()
        if len(memsrc_report) > 0:
            payload['memory_sources'] = memsrc_report

        # get lucidity response
        downBttns = [b for b in self.lucidityRadBttns if b.isChecked()]
        if len(downBttns) > 0:
            payload['lucid'] = downBttns[0].text()

        # add scale responses
        for scalename, scalewidg in self.popups.items():
            if self.popups_completed[scalename]:
                payload[scalename] = scalewidg.payload
                # payload['scales'][scalename] = [ slid.value() for slid in scalewidg.sliders.values() ]
            # responses = { qnum: slid.value() for qnum, slid in scalewidg.sliders.items() }

        # save
        # # If it's the first arousal, then need to make dreams directory.
        # if self.aro_id == 'aro-001':
        #     os.mkdir(os.path.dirname(self.dream_fname))
        with open(self.dream_fname, 'w') as outfile:
            dump(payload,
                 outfile,
                 sort_keys=True,
                 indent=4,
                 ensure_ascii=False)
        print(f'Saved {self.dream_fname}')
Example No. 12
def cleanup_events():
    """
    Cleanup old events from Redis. Redis key expires should take care of everything but sorted sets.
    """
    redis_instance = app.extensions['redis']['REDIS']

    # Get short names of old events
    now_seconds = get_current_timestamp()
    removable_names = redis_instance.zrangebyscore(app.config['REDIS_KEY_EVENT_END'], '-inf', now_seconds)

    # Setup Redis transaction
    pipe = redis_instance.pipeline()

    # Add commands to transaction; zrem takes members as separate
    # arguments, so unpack the list of short names
    if removable_names:
        pipe.zrem(app.config['REDIS_KEY_EVENT_START'], *removable_names)
        pipe.zrem(app.config['REDIS_KEY_EVENT_END'], *removable_names)

    # Execute the transaction and return the multi bulk reply
    return pipe.execute()
Example No. 13
def get_ongoing_events():
    """
    Get events with a start time in the past, sorted oldest first.
    Expires and cleanup_events() should cause only relevant events to show up.
    """
    redis_instance = app.extensions['redis']['REDIS']

    now_seconds = get_current_timestamp()

    ongoing_events = redis_instance.zrangebyscore(app.config['REDIS_KEY_EVENT_START'], '-inf', now_seconds)

    # Setup Redis transaction
    pipe = redis_instance.pipeline()

    # Add commands to transaction
    for short_name in ongoing_events:
        pipe.hgetall(app.config['REDIS_KEY_EVENT_INFO'] % short_name)

    # Execute the transaction and return the event info hashes
    return pipe.execute()
Example No. 14
def copy_new_lead_from_customer(customer, customer_account):
    new_lead = Lead(customer_account.mobile)
    new_lead.full_name = customer.full_name
    new_lead.short_name = customer.short_name
    new_lead.sea_type = LeadSeaType.TAOMI_PRIVATE
    new_lead.relation_id = customer.relation_id
    new_lead.lead_source = LeadSource.TAOMI_DEPT
    new_lead.lead_status = TaomiStatus.L

    new_lead.city = customer_account.city
    new_lead.city_cn = customer_account.city_cn
    new_lead.main_category = customer_account.main_category
    new_lead.main_category_cn = customer_account.main_category_cn
    new_lead.top_category = customer_account.top_category
    new_lead.top_category_cn = customer_account.top_category_cn

    new_lead.copy_time = get_current_timestamp()
    new_lead.is_frozen = 1

    db.session.add(new_lead)
    db.session.commit()
    new_lead.build_search()

    return new_lead
Example No. 15
token = json.loads(token_response).get("data")

if not token:
    print("failed to obtain authentication token")
else:
    # request headers
    headers = {"languageType": "zh_CHS", "X-Authorization": token}
    # request parameters
    params = {"rotateDegree": 0, "modelType": 0, "isResponseImageRequired": True}

    use_binary = False

    # Send the image either as a raw binary upload or as a base64 string;
    # both paths hit the same endpoint with the same headers and params.
    if use_binary:
        files = {'file': (file_name, open(file_path, 'rb'))}
    else:
        files = {'base64Image': (None, utils.img_to_base64(file_path))}

    response = requests.post(url=alignment_url, headers=headers, data=params,
                             files=files, verify=False).text
    response = json.loads(response)
    print(json.dumps(response, ensure_ascii=False))
    
    # If isResponseImageRequired is set to True, store the processed image
    response_data = response.get('data')
    if response_data:
        base64_img_data = response_data.get('base64ImageDst')
        if base64_img_data:
            utils.base64_to_img(base64_img_data, utils.get_current_timestamp() + ".jpg")
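A minimal sketch of the two base64 helpers this script assumes (the real utils module may differ):

import base64

def img_to_base64(path):
    # Read the image file and encode it as an ASCII base64 string.
    with open(path, 'rb') as fh:
        return base64.b64encode(fh.read()).decode('ascii')

def base64_to_img(data, out_path):
    # Decode the base64 payload and write it back out as an image file.
    with open(out_path, 'wb') as fh:
        fh.write(base64.b64decode(data))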
Example No. 16
international.add_ambit(
    Ambito("Internacional",
           get_radio_channels_from_part(canales_internacionales)))

canales_andorra = stringbetween(content, "## Andorra", "")
andorra.add_ambit(
    Ambito("Andorra", get_radio_channels_from_part(canales_andorra)))

# Save data to JSON file
json_result = {
    "license": get_license_info(),
    "epg_url": None,
    "countries": [spain.to_json(),
                  international.to_json(),
                  andorra.to_json()],
    "updated": get_current_timestamp()
}
with open('./public/output/radio_channels.json', "w+") as json_file:
    json_file.write(json.dumps(json_result, indent=4, sort_keys=False))
print("JSON Updated")

# Save data to M3U8 file
with open('./public/output/radio_channels.m3u8', "w+") as text_file:
    text_file.write("#EXTM3U" + "\n")
    text_file.write("# @LaQuay https://github.com/LaQuay/TDTChannels" + "\n")
    text_file.write(spain.to_m3u8())
    text_file.write(international.to_m3u8())
    text_file.write(andorra.to_m3u8())
print("M3U8 Updated")
Example No. 17
def monthly_energy_csv():
    """Returns the WattWatchers long_energy endpoint formatted as csv

    params:
    - api_key: as specified by the WattWatchers API docs
    - device_id: as specified by the WattWatchers API docs
    - from_ts (optional)
    - to_ts (optional)
    - granularity (optional, defaults to '30m')

    response:
    - status 200, with the csv in the body for successful requests
      Note: the units for the energy fields are kWh, unlike the WW API
    - status 400 for unsuccessful requests
    """

    # get parameters
    api_key = request.args.get('api_key')
    device_id = request.args.get('device_id')
    from_ts = request.args.get('from_ts')
    to_ts = request.args.get('to_ts')
    granularity = request.args.get('granularity') or '30m'

    if wattwatchers_api.GRANULARITY.index(
            granularity) < wattwatchers_api.GRANULARITY.index('30m'):
        return 'Invalid granularity. Smallest granularity for monthly data is 30m', 400
    if from_ts and not from_ts.isdigit():
        return 'Invalid unix timestamp for from_ts.', 400
    if to_ts and not to_ts.isdigit():
        return 'Invalid unix timestamp for to_ts.', 400

    # compute timestamps to ensure we get 1 month worth of data
    # assume one month is 31 days for simplicity
    one_month_in_secs = 31 * 24 * 60 * 60
    if not from_ts and not to_ts:
        to_ts = get_current_timestamp()
        from_ts = to_ts - one_month_in_secs
    elif not to_ts:
        to_ts = int(from_ts) + one_month_in_secs
    elif not from_ts:
        from_ts = int(to_ts) - one_month_in_secs

    # make call to WattWatchers API
    try:
        results = wattwatchers_api.long_energy(api_key,
                                               device_id,
                                               granularity=granularity,
                                               from_ts=from_ts,
                                               to_ts=to_ts)
    except Exception as e:
        return 'Could not fetch data from the WattWatchers API: %s' % str(e), 400

    # convert energy values from joules to kWh
    for entry in results:
        for key in entry:
            if key in wattwatchers_api.ALL_ENERGY_FIELDS:
                entry[key] = [joules_to_kwh(int(x)) for x in entry[key]]

    # determine the number of channels
    num_channels = 0
    if isinstance(results, list) and len(results) > 0 and isinstance(
            results[0], dict):
        num_channels = len(results[0].get(
            wattwatchers_api.LONG_ENERGY_DATA_FIELDS[0], []))

    csv = formatters.to_csv(
        results,
        header_fields=wattwatchers_api.HEADER_FIELDS,
        data_fields=wattwatchers_api.LONG_ENERGY_DATA_FIELDS,
        num_channels=num_channels)

    return Response(csv, mimetype='text/plain')
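A minimal sketch of the joules_to_kwh conversion assumed above (1 kWh = 1000 W * 3600 s = 3,600,000 J):

def joules_to_kwh(joules):
    # Convert energy from joules to kilowatt-hours.
    return joules / 3_600_000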