Example #1
 def prepare_obj(self,
                 file_like_object,
                 allowed_exts_url=None,
                 filename=None):
     """
     Prepare objects (mostly images) for upload.
     That is, the method can convert input from almost any format.
     """
     # url
     parsed_url = None
     if isinstance(file_like_object, str):
         parsed_url = urlparse(file_like_object)
     if parsed_url and parsed_url.hostname:
         if allowed_exts_url:
             extension = parsed_url.path.split('.')[-1].lower()
             is_default_extension = extension not in allowed_exts_url
             is_vk_image = 'userapi.com' in parsed_url.hostname
             if is_default_extension and not is_vk_image:
                 raise PWarning(
                     f"Загрузка по URL доступна только для {' '.join(allowed_exts_url)}"
                 )
         self.public_download_url = file_like_object
     elif isinstance(file_like_object, bytes):
         if filename:
             tmp = NamedTemporaryFile()
             tmp.write(file_like_object)
             tmp.name = filename
             tmp.seek(0)
             self.content = tmp
         else:
             self.content = file_like_object
     # path
     elif isinstance(file_like_object,
                     str) and os.path.exists(file_like_object):
         with open(file_like_object, 'rb') as file:
             file_like_object = file.read()
             self.content = file_like_object
     elif isinstance(file_like_object, BytesIO):
         file_like_object.seek(0)
         _bytes = file_like_object.read()
         if filename:
             tmp = NamedTemporaryFile()
             tmp.write(_bytes)
             tmp.name = filename
             tmp.seek(0)
             self.content = tmp
         else:
             self.content = _bytes
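A note on the bytes branch above: reassigning the wrapper's name attribute only relabels the Python object; the file on disk keeps its original path. A minimal self-contained sketch of that pattern (the payload and filename are illustrative, not from the original):

from tempfile import NamedTemporaryFile
import os

payload = b"fake image bytes"           # illustrative payload
tmp = NamedTemporaryFile(delete=False)  # delete=False so cleanup is not confused by the relabeled name
real_path = tmp.name                    # remember the actual on-disk path
tmp.write(payload)
tmp.name = "photo.png"                  # relabels the wrapper only; the file stays at real_path
tmp.seek(0)
assert tmp.read() == payload
tmp.close()
os.remove(real_path)                    # clean up the real file ourselves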
Example #2
    def test_other_field(self) -> None:
        cookie_str: str = "abcd=abcd;"

        file_content: str = "Hello Word!"

        f1 = NamedTemporaryFile(delete=True)
        file_name: str = f1.name
        f1.write(file_content.encode())
        f1.seek(0)
        f2 = NamedTemporaryFile(delete=True)
        f2.name = file_name  # type: ignore
        f2.write(file_content.encode())
        f2.seek(0)

        test_helper: TornadoTestHelper[HTTPResponse] = TornadoTestHelper(
            self,
            OtherFieldHandler.post,
            cookie_dict={"cookie": cookie_str},
            file_dict={f1.name: f1.read()},
            form_dict={
                "a": "1",
                "b": "2",
                "c": "3"
            },
        )
        content_type, body = self.encode_multipart_formdata(
            data={
                "a": "1",
                "b": "2",
                "c": "3"
            }, files={file_name: f2.read()})
        response: HTTPResponse = self.fetch(
            "/api/other_field",
            headers={
                "cookie": cookie_str,
                "Content-Type": content_type,
                "content-length": str(len(body))
            },
            method="POST",
            body=body,
        )
        for resp in [test_helper.post(), response]:
            resp = json.loads(resp.body.decode())
            assert {
                "filename": file_name,
                "content": file_content,
                "form_a": "1",
                "form_b": "2",
                "form_c": ["3"],
                "cookie": {
                    "abcd": "abcd"
                },
            } == resp["data"]
Example #3
    def execute(self, context):
        """
        Executes the staging command from the list of srms requested.
        """
        if isinstance(self.srmfile, dict):
            task_name = self.srmfile['name']
            task_parent_dag = self.srmfile['parent_dag']
            sbx_xcom = get_task_instance(context, task_name, task_parent_dag)
            self.srmfile = sbx_xcom[self.srmkey]

        elif not os.path.isfile(self.srmfile) and not hasattr(
                self.srms, '__iter__'):
            self.srmfile = Variable.get(self.srmfile)
            if not os.path.isfile(self.srmfile):
                self.status = State.UPSTREAM_FAILED
                raise AirflowException(
                    "Input srmfile doesn't exist and srm list not a list")
        self.progress = {'Percent done': 0}

        surl_list = srmlist.srmlist()  # holds all the srms (both from the file and the list argument)
        self.surl_list = self.build_srm_list(surl_list)
        try:
            self.stage_ID = stager_access.stage(list(self.surl_list))
        except xmlrpclib.Fault:
            sleep(60)
            self.stage_ID = stager_access.stage(list(self.surl_list))
        logging.info(
            "Successfully sent staging command for " +
            stager_access.get_progress()[str(self.stage_ID)]['File count'] +
            " files.")
        logging.info("StageID= " + str(self.stage_ID))

        self.state = State.RUNNING
        sleep(120)
        try:
            self.progress = stager_access.get_progress()[str(self.stage_ID)]
        except:
            pass
        self.started = False
        f = NamedTemporaryFile(delete=False)
        for i in surl_list:
            f.write(bytes(i, encoding='utf8'))
        f.close()
        if not f:
            f.name = ""
        while self.still_running():
            sleep(120)
        if self.state == State.SUCCESS:
            return {'srmfile': str(f.name)}
        self.state = State.FAILED
        return {'srmfile': str(f.name)}
Example #4
    def test_other_field(self, client: TestClient) -> None:
        cookie_str: str = "abcd=abcd;"

        file_content: str = "Hello Word!"

        f1 = NamedTemporaryFile(delete=True)
        file_name: str = f1.name
        f1.write(file_content.encode())
        f1.seek(0)
        f2 = NamedTemporaryFile(delete=True)
        f2.name = file_name  # type: ignore
        f2.write(file_content.encode())
        f2.seek(0)

        test_helper: StarletteTestHelper[Response] = StarletteTestHelper(
            client,
            other_field_route,
            cookie_dict={"cookie": cookie_str},
            file_dict={"upload_file": f1},
            form_dict={
                "a": "1",
                "b": "2",
                "c": ["3"]
            },
        )
        for resp in [
                test_helper.post().json(),
                client.post(
                    "/api/other_field",
                    data={
                        "a": "1",
                        "b": "2",
                        "c": ["3"]
                    },
                    headers={
                        "cookie": cookie_str
                    },
                    files={
                        "upload_file": f2
                    },
                ).json(),
        ]:
            assert {
                "filename": file_name.split("/")[-1],
                "content": file_content,
                "form_a": "1",
                "form_b": "2",
                "form_c": ["3"],
                "cookie": {
                    "abcd": "abcd"
                },
            } == resp["data"]
Example #5
    def draw_videos(self):
        i = 0
        for video in reversed(self.track_videos):
            overlay = NamedTemporaryFile(
                suffix='.avi',
                delete=self.delete
            )

            # Removes all white spaces and non alphanumeric chars from title
            video.options['title'] = re.sub(
                r'[^\w\.]',
                '',
                video.options['title']
            ) + '.webm'
            # Trim the video if it needs to be
            if (video.options['from'] != 0 or
                video.options['end'] - video.options['from'] <
                    video.options['duration']):

                self.editor.trim(
                    video.options['title'],
                    overlay.name,
                    seconds_to_timecode(video.options['from']),
                    seconds_to_timecode(video.options['duration'])
                )
            else:
                overlay.name = video.options['title']

            # Also scale the video down to size
            scaled_overlay = NamedTemporaryFile(
                suffix='.avi',
                delete=self.delete
            )

            self.editor.scale_video(
                overlay.name,
                scaled_overlay.name,
                percent_to_px(video.options['width'], self.size[0]),
                percent_to_px(video.options['height'], self.size[1]),
            )
            overlay.close()

            out = NamedTemporaryFile(suffix='.avi', delete=self.delete)

            self.overlay_videos(self.current_video.name, scaled_overlay.name,
                                video.options, out.name)
            scaled_overlay.close()

            self.current_video = out
            i += 1
Example #6
    def update(self, instance, validated_data):
        """
        override update instance method
        """
        img_temp = NamedTemporaryFile(delete=True)
        img_temp.write(urlopen(validated_data["image_url"]).read())
        img_temp.name = f"{validated_data['external_id']}.jpg"
        instance.external_id = validated_data["external_id"]
        instance.author = validated_data["author"]
        instance.image_file = File(img_temp)
        instance.save()

        img_temp.flush()

        return instance
Example #7
 def get():
     main = current_app.main
     fin_cnt = 3
     non_fin_cnt = 7
     main.select(fin_cnt, Sector.FINANCIAL)
     main.select(non_fin_cnt, Sector.NON_FINANCIAL)
     f = NamedTemporaryFile()
     f.name = f'{f.name}.csv'
     r_file = os.path.join(os.getcwd(), "domain\\service\\porfolio.r")
     main.save_returns_matrix_to_csv(f.name)
     res = subprocess.check_output(
         f'"C:\\Program Files\\R\\R-3.6.0\\bin\\Rscript.exe" "{r_file}" "{f.name}"',
         shell=True)
     main.reset()
     return make_response(matrix_loader(res.decode('utf-8').split('\r\n')))
Example #8
def decloakToNamedTemporaryFile(filepath, name=None):
    retVal = NamedTemporaryFile()
    def __del__():
        try:
            if hasattr(retVal, 'old_name'):
                retVal.name = retVal.old_name
            retVal.close()
        except OSError:
            pass
    retVal.__del__ = __del__
    retVal.write(decloak(filepath))
    retVal.seek(0)
    if name:
        retVal.old_name = retVal.name
        retVal.name = name
    return retVal
Example #9
def gunzip(filename):
    """Unzip a ``.gz`` file to a temporary file and returns its path."""
    if not filename.endswith('.gz'):
        #colors.printc("gunzip() error: file must end with .gz", c=1)
        return filename
    from tempfile import NamedTemporaryFile
    import gzip

    tmp_file = NamedTemporaryFile(delete=False)
    tmp_file.name = os.path.join(os.path.dirname(tmp_file.name),
                                 os.path.basename(filename).replace('.gz',''))
    inF = gzip.open(filename, "rb")
    outF = open(tmp_file.name, "wb")
    outF.write(inF.read())
    outF.close()
    inF.close()
    return tmp_file.name
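A hypothetical call to the helper above, assuming a gzipped file exists at the given path ("measurements.csv.gz" is illustrative, not from the original):

path = gunzip("measurements.csv.gz")  # path of the decompressed temporary copy
with open(path) as f:
    data = f.read()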
Example #10
    def create(self, validated_data):
        """
        override create instance method
        """
        img_temp = NamedTemporaryFile(delete=True)
        img_temp.write(urlopen(validated_data["image_url"]).read())
        img_temp.name = f"{validated_data['external_id']}.jpg"

        instance = Image.objects.create(
            external_id=validated_data["external_id"],
            author=validated_data["author"],
            tags=validated_data["tags"],
            image_file=File(img_temp),
        )
        img_temp.flush()

        return instance
Example #11
    def test_other_field(self, client: FlaskClient) -> None:

        file_content: str = "Hello Word!"

        f1 = NamedTemporaryFile(delete=True)
        file_name: str = f1.name
        f1.write(file_content.encode())
        f1.seek(0)
        f2 = NamedTemporaryFile(delete=True)
        f2.name = file_name  # type: ignore
        f2.write(file_content.encode())
        f2.seek(0)

        flask_test_helper: FlaskTestHelper[Response] = FlaskTestHelper(
            client,
            other_field_route,
            file_dict={"upload_file": f1},
            form_dict={
                "a": "1",
                "b": "2",
                "c": "3"
            })

        client.set_cookie("localhost", "abcd", "abcd")
        for resp in [
                flask_test_helper.post().get_json(),
                client.post("/api/other_field",
                            data={
                                "a": "1",
                                "b": "2",
                                "upload_file": f2,
                                "c": "3"
                            }).get_json(),
        ]:
            assert {
                "filename": file_name,
                "content": file_content,
                "form_a": "1",
                "form_b": "2",
                "form_c": ["3"],
                "cookie": {
                    "abcd": "abcd"
                },
            } == resp["data"]
Example #12
def db_backup(message):
    my_id = db.select('users',
                      "name = '" + TELEGRAM_NAME + "'")[0]['telegram_id']
    if (message.from_user.id != my_id):
        return
    bot.send_chat_action(message.from_user.id, "upload_document")
    ps = subprocess.Popen(['pg_dump', DB_URL], stdout=subprocess.PIPE)
    output = ps.communicate()[0]
    backup_f = NamedTemporaryFile()
    backup_f.write(output)
    backup_f.name = "backup - " + datetime.now().strftime(
        '%Y-%m-%d %H:%M:%S') + ".sql"
    backup_f.seek(0)

    telebot.types.Message.de_json(
        telebot.apihelper._make_request(
            API_TOKEN,
            'sendDocument',
            params={'chat_id': str(message.from_user.id)},
            files={'document': backup_f},
            method='post'))
Example #13
    def draw_videos(self):
        i = 0
        for video in reversed(self.track_videos):
            overlay = NamedTemporaryFile(suffix='.avi', delete=self.delete)

            # Removes all white spaces and non alphanumeric chars from title
            video.options['title'] = re.sub(r'[^\w\.]', '',
                                            video.options['title']) + '.webm'
            # Trim the video if it needs to be
            if (video.options['from'] != 0
                    or video.options['end'] - video.options['from'] <
                    video.options['duration']):

                self.editor.trim(
                    video.options['title'], overlay.name,
                    seconds_to_timecode(video.options['from']),
                    seconds_to_timecode(video.options['duration']))
            else:
                overlay.name = video.options['title']

            # Also scale the video down to size
            scaled_overlay = NamedTemporaryFile(suffix='.avi',
                                                delete=self.delete)

            self.editor.scale_video(
                overlay.name,
                scaled_overlay.name,
                percent_to_px(video.options['width'], self.size[0]),
                percent_to_px(video.options['height'], self.size[1]),
            )
            overlay.close()

            out = NamedTemporaryFile(suffix='.avi', delete=self.delete)

            self.overlay_videos(self.current_video.name, scaled_overlay.name,
                                video.options, out.name)
            scaled_overlay.close()

            self.current_video = out
            i += 1
Example #14
    def post(self):
        """POST request handler. Processes form data"""

        # check if user uploaded an excel file
        uploaded_file = request.files['file']
        if uploaded_file and not (uploaded_file.filename.endswith('.xls') or
                                  uploaded_file.filename.endswith('.xlsx')):
            flash("Uploaded file is not an .xls or .xlsx file", "error")
            return redirect(url_for('index'))

        # save file to /tmp folder
        temp_file = NamedTemporaryFile()
        temp_file_name_w_extension = ntpath.basename(temp_file.name)
        temp_file.name = temp_file.name\
            .replace(temp_file_name_w_extension, uploaded_file.filename)
        uploaded_file.save(temp_file.name)

        # get output format
        output_format = request.form.get('format')
        output_ext = '.' + output_format
        # process output format and mime type for downloading
        post_process_to = None
        mime_type = 'text/html' if output_format == 'html'\
            else 'application/text'
        if output_format in ('pdf', 'doc'):
            post_process_to = output_format

        # convert uploaded file to html
        temp_html_file = NamedTemporaryFile()
        html_file_path = copy(temp_file.name).replace('.xlsx', '')\
            .replace('.xls', '') + '.' + 'html'
        temp_html_file.name = html_file_path

        # TODO: This hard-makes PPP conv to HTML. Change to doc if doc, etc.
        out_format = 'html' if output_format == 'pdf' else output_format

        ''' command_line = \
            self._build_ppp_ppp_tool_run_cmd(in_file_path=temp_file.name,
                                             out_format=out_format,
                                             out_file_path=html_file_path)
        _, stderr = self._run_background_process(command_line) '''
        ppp_resp = self._run_ppp_api(in_file_path=temp_file.name,
                                     out_format=out_format,
                                     out_file_path=html_file_path)

        # if ppp.ppp tool wrote something to stderr, we should show it to user
        # if stderr:
        if not ppp_resp:
            flash("STDERR:\n{}".format(ppp_resp), "error")
            return redirect(url_for('index'))

        # output path now exists and refers to converted html file at /tmp
        pdf_doc_file_path = html_file_path

        # if output format is PDF or DOC
        if post_process_to == 'pdf':
            try:
                w_p = app_config.WKHTMLTOPDF_PATH_LOCAL
                pdf_doc_file_name, pdf_doc_file_path, mime_type = \
                    self._convert_to_pdf(_input=html_file_path,
                                         wkhtmltopdf_path=w_p)
            except OSError:
                try:
                    # TODO: This hasn't been fully implemented
                    w_p = 'This hasnt been implemented yet. My WKTHMLTOPDF ' \
                          'is installed globally. This message will throw ' \
                          'an error.'
                    # w_p = app_config.WKHTMLTOPDF_PATH_SYSTEM
                    pdf_doc_file_name, pdf_doc_file_path, mime_type = \
                        self._convert_to_pdf(_input=html_file_path,
                                             wkhtmltopdf_path=w_p)
                except FileNotFoundError:
                    # TODO: download and install a binary
                    msg = 'PDF conversion is currently not supported for ' \
                          'this system: {}'.format(platform())
                    raise Exception(msg)
        elif post_process_to == 'doc':
            pdf_doc_file_name, pdf_doc_file_path, mime_type = \
                self._convert_to_doc(_input=html_file_path)

        # return file as response attachment, so browser will start download
        return send_file(pdf_doc_file_path,
                         as_attachment=True,
                         mimetype=mime_type,
                         attachment_filename=uploaded_file.filename
                         .replace('.xlsx', output_ext)
                         .replace('.xls', output_ext))
Example #15
'''
    Working with temporary files
'''

from tempfile import TemporaryFile, NamedTemporaryFile

tf = TemporaryFile(dir='')  # just instantiate it; the file is released once execution finishes, and dir selects the directory

ntf = NamedTemporaryFile()  # a prefix and suffix can also be given
ntf.name  # the path of the temporary file on disk
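A minimal sketch of the pattern most of the examples above rely on: the name attribute is a real filesystem path that other code can open, provided the data has been flushed (on Windows, reopening the path while the NamedTemporaryFile is still open may fail):

from tempfile import NamedTemporaryFile

with NamedTemporaryFile(suffix=".csv") as f:
    f.write(b"a,b,c\n1,2,3\n")
    f.flush()                             # make the bytes visible to other readers of the path
    with open(f.name, "rb") as reader:    # on Windows this reopen may fail while f is still open
        print(reader.read())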
Example #16
    def reducer(self, key, values):
        # obtain the needed info from the key
        modelling_unit, multipliers, lat, lon, timezone = key.split('~')
        lat = float(lat)
        lon = float(lon)
        multipliers = ast.literal_eval(multipliers)  # string to dict
        multiplier = {}
        for i in multipliers:
            multiplier[i['deviceId']] = i['multiplier']
        columns = [x[0] for x in self.config['hive']['final_table_fields']]
        df = pd.DataFrame.from_records(values, index='ts', columns=columns)
        energy_type = df.energyType.unique()[0]
        grouped = df.groupby('deviceId')
        df_new_hourly = None
        for device, data in grouped:
            if device not in multiplier.keys():
                continue
            data = data[~data.index.duplicated(keep='last')]
            data = data.sort_index()
            if df_new_hourly is None:
                df_new_hourly = data[['value']] * multiplier[device]
            else:
                df_new_hourly += data[['value']] * multiplier[device]

        weather = df.drop(['value', 'energyType', 'deviceId'], axis=1)
        weather = weather[~weather.index.duplicated(keep='last')]
        df_new_hourly = df_new_hourly.join(weather)
        df_new_hourly = df_new_hourly[self.config['module_config']
                                      ['model_features']]
        df_new_hourly = df_new_hourly.sort_index()
        df_value = df_new_hourly[['value']].resample('H').sum()
        df_weather = df_new_hourly[[
            "temperature", "windSpeed", "GHI", "windBearing"
        ]].resample('H').max()
        df_new_hourly = df_value.join(df_weather)
        if self.config['save_data_debug']:
            mongo = MongoClient(self.config['mongodb']['host'],
                                self.config['mongodb']['port'])
            mongo[self.config['mongodb']['db']].authenticate(
                self.config['mongodb']['username'],
                self.config['mongodb']['password'])

            mongo[self.config['mongodb']['db']][
                self.config['module_config']['mongo_debug']].replace_one(
                    {"modelling_unit": modelling_unit}, {
                        "modelling_unit": modelling_unit,
                        "multipliers": multipliers,
                        "df": df_new_hourly.reset_index().to_dict('records'),
                        "lat": lat,
                        "lon": lon,
                        "timezone": timezone
                    },
                    upsert=True)

            mongo.close()

        freq = calculate_frequency(df_new_hourly)
        whole_day_index = len(
            np.arange(pd.Timedelta('1 days'), pd.Timedelta('2 days'), freq))
        df_new_hourly = df_new_hourly.resample(freq).asfreq()
        df_new_hourly.index = df_new_hourly.index.tz_localize("UTC")
        df_new_hourly.index = df_new_hourly.index.tz_convert(timezone)
        count = df_new_hourly.groupby([
            df_new_hourly.index.year, df_new_hourly.index.month,
            df_new_hourly.index.day
        ])
        complete_days = [
            datetime(*day).date() for day, x in count
            if x.count()['value'] >= whole_day_index
        ]
        df_new_hourly = df_new_hourly[df_new_hourly.index.tz_localize(
            None).floor('D').isin(complete_days)]
        # All data for clustering.
        df_new_hourly = df_new_hourly.assign(
            clustering_values=df_new_hourly.value.rolling(
                5, center=True, min_periods=1).mean())
        self.increment_counter("M", "O", amount=1)
        try:
            model = train_gaussian_mixture_model(
                df_new_hourly[['value', 'clustering_values']],
                "clustering_values", [2, 3, 4], timezone)
            structural = model[0][['time', 's', 'dayhour']]
            structural = structural.set_index('time')
            structural['s'] = pd.to_numeric(structural.s, errors='coerce')
        except Exception as e:
            if "time" in df_new_hourly:
                df_new_hourly = df_new_hourly.drop("time", axis=1)
            mongo = MongoClient(self.config['mongodb']['host'],
                                self.config['mongodb']['port'])
            mongo[self.config['mongodb']['db']].authenticate(
                self.config['mongodb']['username'],
                self.config['mongodb']['password'])

            mongo[self.config['mongodb']['db']][
                self.config['module_config']['mongo_error']].replace_one(
                    {"modellingUnitId": modelling_unit},
                    {
                        "modellingUnitId": modelling_unit,
                        "model": "error in clustering",
                        #"df": df_new_hourly.reset_index().to_dict('records'),
                        "multipliers": multipliers,
                        "lat": lat,
                        "lon": lon,
                        "timezone": timezone,
                        "exception": str(e),
                        "error": 1
                    },
                    upsert=True)
            mongo.close()
            return

        try:
            df_new_hourly = df_new_hourly.merge(structural,
                                                how='right',
                                                right_index=True,
                                                left_index=True)
            df_new_hourly = df_new_hourly[df_new_hourly.dayhour.isna() ==
                                          False]
            df_new_hourly = df_new_hourly[df_new_hourly.s.isna() == False]
            count = df_new_hourly.groupby([
                df_new_hourly.index.year, df_new_hourly.index.month,
                df_new_hourly.index.day
            ])
            complete_days = [
                datetime(*day).date() for day, x in count
                if x.count()['value'] >= whole_day_index
            ]
            df_new_hourly = df_new_hourly[df_new_hourly.index.tz_localize(
                None).floor('D').isin(complete_days)]
            self.increment_counter("M", "O", amount=1)

            df = prepare_dataframe(model,
                                   df_new_hourly,
                                   "value",
                                   6,
                                   lat,
                                   lon,
                                   timezone=timezone)
        except Exception as e:
            if "time" in df_new_hourly:
                df_new_hourly = df_new_hourly.drop("time", axis=1)
            mongo = MongoClient(self.config['mongodb']['host'],
                                self.config['mongodb']['port'])
            mongo[self.config['mongodb']['db']].authenticate(
                self.config['mongodb']['username'],
                self.config['mongodb']['password'])

            mongo[self.config['mongodb']['db']][
                self.config['module_config']['mongo_error']].replace_one(
                    {"modellingUnitId": modelling_unit}, {
                        "modellingUnitId": modelling_unit,
                        "model": "Error preparing dataframe",
                        "df": df_new_hourly.reset_index().to_dict('records'),
                        "multipliers": multipliers,
                        "lat": lat,
                        "lon": lon,
                        "timezone": timezone,
                        "exception": str(e),
                        "error": 2
                    },
                    upsert=True)
            mongo.close()
            return

        try:
            e_type = "electricity" if energy_type == "electricityConsumption" else "gas"
            df = df.set_index('time')
            count = df.groupby([df.index.year, df.index.month, df.index.day])
            complete_days = [
                datetime(*day).date() for day, x in count
                if x.count()['value'] >= whole_day_index
            ]
            df = df[df.index.tz_localize(None).floor('D').isin(complete_days)]
            self.increment_counter("M", "O", amount=1)
            model_linear = train_linear(model=model,
                                        type=e_type,
                                        dataframe=df,
                                        value_column="value",
                                        n_max=6,
                                        m_max=0,
                                        by_s=False)
            #save model to hbase
            self.increment_counter("M", "O", amount=1)
            model_linear = clean_linear(model_linear)
            pickle_model = pickle.dumps(model_linear)
            pickle_model = zlib.compress(pickle_model, 9)
            model_folder = self.config['module_config']['model_folder']
            try:
                call(["hadoop", "fs", "-mkdir", "-p", model_folder])
            except:
                pass
            model_file = NamedTemporaryFile()
            model_file.name = modelling_unit
            with open(model_file.name, 'wb') as f:
                f.write(pickle_model)  # write the compressed, pickled model
            call([
                "hadoop", "fs", "-copyFromLocal", model_file.name, model_folder
            ])
            self.increment_counter("M", "O", amount=1)
        except Exception as e:
            if "time" in df:
                df = df.drop("time", axis=1)

            mongo = MongoClient(self.config['mongodb']['host'],
                                self.config['mongodb']['port'])
            mongo[self.config['mongodb']['db']].authenticate(
                self.config['mongodb']['username'],
                self.config['mongodb']['password'])

            mongo[self.config['mongodb']['db']][
                self.config['module_config']['mongo_error']].replace_one(
                    {"modellingUnitId": modelling_unit},
                    {
                        "modellingUnitId": modelling_unit,
                        "model": "Error training model",
                        #"df": df_new_hourly.reset_index().to_dict('records'),
                        "multipliers": multipliers,
                        "lat": lat,
                        "lon": lon,
                        "timezone": timezone,
                        "exception": str(e),
                        "error": 3
                    },
                    upsert=True)
            mongo.close()
            return
Example #17
    def post(self):
        """POST request handler. Processes form data"""

        # check if user uploaded an excel file
        uploaded_file = request.files['file']
        if uploaded_file and not (uploaded_file.filename.endswith('.xls')
                                  or uploaded_file.filename.endswith('.xlsx')):
            flash("Uploaded file is not an .xls or .xlsx file", "error")
            return redirect(url_for('index'))

        # save file to /tmp folder
        temp_file = NamedTemporaryFile()
        temp_file_name_w_extension = ntpath.basename(temp_file.name)
        temp_file.name = temp_file.name\
            .replace(temp_file_name_w_extension, uploaded_file.filename)
        uploaded_file.save(temp_file.name)

        # get output format
        output_format = request.form.get('format')
        output_ext = '.' + output_format
        # process output format and mime type for downloading
        post_process_to = None
        mime_type = 'text/html' if output_format == 'html'\
            else 'application/text'
        if output_format in ('pdf', 'doc'):
            post_process_to = output_format

        # convert uploaded file to html
        temp_html_file = NamedTemporaryFile()
        html_file_path = copy(temp_file.name).replace('.xlsx', '')\
            .replace('.xls', '') + '.' + 'html'
        temp_html_file.name = html_file_path

        # TODO 3: This hard-makes PPP conv to HTML. Change to doc if doc, etc.
        command_line = \
            self._build_pmix_ppp_tool_run_cmd(in_file_path=temp_file.name,
                                              out_format='html',
                                              out_file_path=html_file_path)
        _, stderr = self._run_background_process(command_line)

        # if pmix.ppp tool wrote something to stderr, we should show it to user
        if stderr:
            flash("STDERR:\n{}".format(stderr), "error")
            return redirect(url_for('index'))

        # output path now exists and refers to converted html file at /tmp
        pdf_doc_file_path = html_file_path

        # if output format is PDF or DOC
        if post_process_to == 'pdf':
            try:
                w_p = app_config.WKHTMLTOPDF_PATH_LOCAL
                pdf_doc_file_name, pdf_doc_file_path, mime_type = \
                    self._convert_to_pdf(_input=html_file_path,
                                         wkhtmltopdf_path=w_p)
            except OSError:
                try:
                    # w_p = app_config.WKHTMLTOPDF_PATH_SYSTEM
                    w_p = 'hello'
                    pdf_doc_file_name, pdf_doc_file_path, mime_type = \
                        self._convert_to_pdf(_input=html_file_path,
                                             wkhtmltopdf_path=w_p)
                except FileNotFoundError:
                    # TODO 4 - download and install a binary
                    raise Exception('hello there')
        elif post_process_to == 'doc':
            pdf_doc_file_name, pdf_doc_file_path, mime_type = \
                self._convert_to_doc(_input=html_file_path)

        # return file as response attachment, so browser will start download
        return send_file(pdf_doc_file_path,
                         as_attachment=True,
                         mimetype=mime_type,
                         attachment_filename=uploaded_file.filename.replace(
                             '.xlsx', output_ext).replace('.xls', output_ext))

    def get_template(self):

        template = {
            "AWSTemplateFormatVersion": "2010-09-09",
            "Description": "CodePipeline for " + str(self.project_name),
            "Parameters": {
                "Project": {
                    "Description": "The project code which owns this stack",
                    "Type": "String"
                },
                "ProjectDescription": {
                    "Description": "project description",
                    "Type": "String"
                },
                "DeploymentBucketName": {
                    "Description": "Logging bucket",
                    "Type": "String"
                },
                "Image": {
                    "Description": "Docker image",
                    "Type": "String"
                },
                "RepositoryName": {
                    "Description": "CodeCommit Repository Name",
                    "Type": "String"
                },
                "RepositoryBranchName": {
                    "Description": "CodeCommit Repository Branch Name",
                    "Type": "String"
                },
                "BuildServiceRole": {
                    "Description": "Code pipeline build service role",
                    "Type": "String"
                },
                "Subnets": {
                    "Type": "CommaDelimitedList"
                },
                "SecurityGroups": {
                    "Type": "CommaDelimitedList"
                },
                "VpcId": {
                    "Type": "String"
                },
                "BuildProjectName": {
                    "Type": "String"
                },
                "EnvironmentCode": {
                    "Type": "String"
                },
                "BuildspecFile": {
                    "Type": "String"
                }
            },
            "Resources": {
                "LogGroup": {
                    "Type": "AWS::Logs::LogGroup",
                    "DependsOn": "CodeBuildProject",
                    "Properties": {
                        "LogGroupName": {
                            "Fn::Join": [
                                "",
                                [
                                    "/aws/codebuild/", {
                                        "Ref": "BuildProjectName"
                                    }
                                ]
                            ]
                        },
                        "RetentionInDays": 90
                    }
                },
                "CodeBuildProject": {
                    "Type": "AWS::CodeBuild::Project",
                    "Properties": {
                        "Name": {
                            "Ref": "BuildProjectName"
                        },
                        "Description": {
                            "Ref": "ProjectDescription"
                        },
                        "ServiceRole": {
                            "Ref": "BuildServiceRole"
                        },
                        "Artifacts": {
                            "Type": "CODEPIPELINE"
                        },
                        "VpcConfig": {
                            "VpcId": {
                                "Ref": "VpcId"
                            },
                            "Subnets": {
                                "Ref": "Subnets"
                            },
                            "SecurityGroupIds": {
                                "Ref": "SecurityGroups"
                            }
                        },
                        "Environment": {
                            "Type":
                            "linuxContainer",
                            "ComputeType":
                            "BUILD_GENERAL1_SMALL",
                            "Image": {
                                "Ref": "Image"
                            },
                            "EnvironmentVariables": [{
                                "Name": "EnvCode",
                                "Value": {
                                    "Ref": "EnvironmentCode"
                                }
                            }]
                        },
                        "Source": {
                            "BuildSpec": {
                                "Ref": "BuildspecFile"
                            },
                            "Type": "CODEPIPELINE"
                        },
                        "TimeoutInMinutes":
                        60,
                        "Tags": [{
                            "Key": "Name",
                            "Value": {
                                "Fn::Join": ["-", [{
                                    "Ref": "AWS::StackName"
                                }]]
                            }
                        }]
                    }
                },
                "Pipeline": {
                    "Type": "AWS::CodePipeline::Pipeline",
                    "Properties": {
                        "RoleArn": {
                            "Ref": "BuildServiceRole"
                        },
                        "ArtifactStore": {
                            "Type": "S3",
                            "Location": {
                                "Ref": "DeploymentBucketName"
                            }
                        },
                        "Stages": [{
                            "Name":
                            "Source",
                            "Actions": [{
                                "Name":
                                "SourceAction",
                                "ActionTypeId": {
                                    "Category": "Source",
                                    "Owner": "AWS",
                                    "Version": "1",
                                    "Provider": "CodeCommit"
                                },
                                "OutputArtifacts": [{
                                    "Name":
                                    "CodePipelineSourceOutputArtifact"
                                }],
                                "Configuration": {
                                    "BranchName": {
                                        "Ref": "RepositoryBranchName"
                                    },
                                    "RepositoryName": {
                                        "Ref": "RepositoryName"
                                    }
                                },
                                "RunOrder":
                                1
                            }]
                        }, {
                            "Name":
                            "Build",
                            "Actions": [{
                                "Name":
                                "BuildAction",
                                "InputArtifacts": [{
                                    "Name":
                                    "CodePipelineSourceOutputArtifact"
                                }],
                                "ActionTypeId": {
                                    "Category": "Build",
                                    "Owner": "AWS",
                                    "Version": 1,
                                    "Provider": "CodeBuild"
                                },
                                "Configuration": {
                                    "ProjectName": {
                                        "Ref": "CodeBuildProject"
                                    }
                                },
                                "OutputArtifacts": [{
                                    "Name":
                                    "CodePipelineBuildOutputArtifact"
                                }],
                                "RunOrder":
                                1
                            }]
                        }]
                    }
                }
            }
        }

        f = NamedTemporaryFile(delete=False)

        # Save original name (the "name" actually is the absolute path)
        original_path = f.name

        if self.debug:
            print('original_path: ' + str(original_path))
            print('path: ' + str(os.path.dirname(original_path)))
        # Change the file name to something
        f.name = str(os.path.dirname(original_path)) + '/myfilename.json'

        with open(f.name, 'w') as file:
            file.write(json.dumps(template))

        if self.debug:
            print('##########################')
            print('cwd: ' + str(self.cwd))
            print('creating template.json')
            print('##########################')

        if not os.path.exists(self.cwd + '/template.json'):

            with open(self.cwd + '/template.json', 'w') as file:
                file.write(json.dumps(template))
        else:
            if self.debug:
                print('Not creating template.json')

        return f.name