    def on_post(self, req, resp):
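        # Upload the file posted as 'quotationUploader' to the S3 bucket named
        # by the 's3-bucket-quotation' config entry, keyed as
        # <quotation_id>/<quotation_id><ext>, and record the file name on the
        # ship_quotations row.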

        try:
            q_id = req.get_param('quotation_id')
            for item in req.params:
                if item == 'quotationUploader':
                    fileitem = req.get_param(item)
                    if (fileitem.file):
                        fn = os.path.basename(fileitem.filename)
                        file_name_without_ext = os.path.splitext(fn)[0]
                        extension = os.path.splitext(fn)[1]
                        new_file_name = q_id
                        append_to_new_file_name = new_file_name + extension
                        path = "%s/%s" % (q_id, append_to_new_file_name)
                        database_connection = get_db_connection()
                        cursor = database_connection.cursor()
                        cursor.execute(
                            "select config_value from config where config_key = 's3-bucket-quotation'"
                        )
                        bucket = cursor.fetchone()
                        BUCKET_NAME = bucket['config_value']
                        s3 = aws_cred.aws_s3(BUCKET_NAME)

                        s3.Bucket(BUCKET_NAME).put_object(Key=path,
                                                          Body=fileitem.file,
                                                          ACL='public-read')

                        cursor.execute(
                            "update ship_quotations set file_name = %s "
                            "where quotation_id = %s",
                            (append_to_new_file_name, q_id))
                        database_connection.commit()
                        if cursor.rowcount > 0:
                            resp.status = falcon.HTTP_200
                            message = {
                                'Status': 'All files uploaded successfully'
                            }
                            result_json = json.dumps(message)
                            resp.body = (result_json)
                        else:
                            resp.status = falcon.HTTP_400
                            message = {'Status': 'Failed to upload all files'}
                            result_json = json.dumps(message)
                            resp.body = (result_json)

        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        finally:
            cursor.close()
            database_connection.close()
    def on_post(self, req, resp):
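        # Upload custom-agent documents posted as 'quotationUploader' to the
        # 's3-bucket-customdoc' bucket under <order_id>/<order_id>_<timestamp><ext>,
        # update ship_custom_agent_documents, and report how many files were handled.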
        
        try:
            order_id = req.get_param('order_id')
            upload_count = 0
            list_of_uploads = 0
            for item in req.params:
                if item == 'quotationUploader':
                    list_of_uploads += 1
                    fileitem = req.get_param(item)
                    if (fileitem.file):
                        fn = os.path.basename(fileitem.filename)
                        file_name_without_ext = os.path.splitext(fn)[0]
                        extension = os.path.splitext(fn)[1]
                        new_file_name = order_id
                        current_datetime = time.strftime("%y%m%d-%H%M%S")
                        append_to_new_file_name = new_file_name + '_' + current_datetime + extension
                        path = "%s/%s" % (order_id, append_to_new_file_name)
                        database_connection = get_db_connection()
                        cursor = database_connection.cursor()
                        cursor.execute(
                            "select config_value from config where config_key = 's3-bucket-customdoc'"
                        )
                        bucket = cursor.fetchone()
                        BUCKET_NAME = bucket['config_value']
                        s3 = aws_cred.aws_s3(BUCKET_NAME)
                        s3.Bucket(BUCKET_NAME).put_object(Key=path,
                                                          Body=fileitem.file,
                                                          ACL='public-read')
                        cursor.execute(
                            "update ship_custom_agent_documents set file_name = %s "
                            "where order_id = %s",
                            (append_to_new_file_name, order_id))
                        database_connection.commit()
                        # Count each successful upload so the summary below is accurate.
                        upload_count += 1
                
            if (upload_count == list_of_uploads):
                resp.status = falcon.HTTP_200
                message = {'UploadCount': str(upload_count), 'Status': 'All files uploaded successfully'}
                result_json = json.dumps(message)
                resp.body = (result_json)
            else:
                resp.status = falcon.HTTP_400
                message = {'UploadCount': str(upload_count), 'Status': 'Failed to upload all files'}
                result_json = json.dumps(message)
                resp.body = (result_json)

        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400, traceback.print_exc(file=sys.stdout) , err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400, traceback.print_exc(file=sys.stdout) , err.args)
        finally:
            cursor.close()
            database_connection.close()
    def on_post(self, req, resp):
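        # Upload files posted as 'documentUploader' to the 's3-bucket-customdoc'
        # bucket under <order_id>/<name>_<timestamp><ext> and insert a
        # ship_custom_documents row for each file.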
        try:
            order_id = req.get_param('orderId')
            for item in req.params:
                if item == 'documentUploader':
                    fileitem = req.get_param(item)
                    if (fileitem.file):
                        fn = os.path.basename(fileitem.filename)
                        file_name_without_ext = os.path.splitext(fn)[0]
                        extension = os.path.splitext(fn)[1]
                        current_datetime = time.strftime("%Y%m%d%H%M%S")
                        append_to_new_file_name = file_name_without_ext + '_' + current_datetime + extension
                        path = "%s/%s" % (order_id, append_to_new_file_name)
                        database_connection = get_db_connection()
                        cursor = database_connection.cursor()
                        cursor.execute(
                            "select config_value from config where config_key = 's3-bucket-customdoc'"
                        )
                        bucket = cursor.fetchone()
                        BUCKET_NAME = bucket['config_value']
                        s3 = aws_cred.aws_s3(BUCKET_NAME)
                        s3.Bucket(BUCKET_NAME).put_object(
                            Key=path, Body=fileitem.file, ACL='public-read')
                        args = (append_to_new_file_name, order_id)
                        cursor.execute(
                            "insert into ship_custom_documents(file_name,order_id) \
                                        values(%s,%s)", args)
                        database_connection.commit()
            if cursor.rowcount == 1:
                resp.status = falcon.HTTP_200
                resp.body = ("200")
            else:
                resp.status = falcon.HTTP_204
                resp.body = ("204")

        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        finally:
            cursor.close()
            database_connection.close()
    def on_patch(self, req, resp, login_id, session):
        # Remove a profile document: deletes the object from the
        # 's3-bucket-profiledoc' bucket and the matching profile_documents row.
        # Authenticate login id and session availability.
        try:
            if (MemcacheFunctions.IsSessionValid(login_id, session) is False):
                resp.status = falcon.HTTP_401
                Error = {
                    "Reason": "Invalid Login Credentials or Session is Expired"
                }
                result_json = json.dumps(Error)
                resp.body = result_json
                return
        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, 'Error', ex.args)
        try:
            # Reading the JSON payload from the request stream
            raw_json = req.stream.read(req.content_length or 0).decode('utf-8')
            resultdict_json = json.loads(raw_json,
                                         object_pairs_hook=OrderedDict)
            # Converting the JSON values to a list
            list_values = [v for v in resultdict_json.values()]
            profile_documents_id = list_values[0]
            # Connecting to the database
            database_connection = get_db_connection()
            cursor = database_connection.cursor()
            cursor.execute(
                "select config_value from config where config_key = 's3-bucket-profiledoc' "
            )
            bucket = cursor.fetchone()
            BUCKET_NAME = bucket['config_value']
            s3 = aws_cred.aws_s3(BUCKET_NAME)
            cursor.execute(
                "select file_name from profile_documents where profile_documents_id = %s",
                (profile_documents_id,))
            row = cursor.fetchone()
            obj = s3.Object(BUCKET_NAME,
                            os.path.join(login_id, row['file_name']))
            obj.delete()
            # Delete the profile_documents row
            cursor.execute(
                "delete from profile_documents where profile_documents_id = %s",
                (profile_documents_id,))
            database_connection.commit()
            if (cursor.rowcount == 1):
                resp.status = falcon.HTTP_200
                resp.body = ("Delete Successful")
            else:
                resp.status = falcon.HTTP_204
                resp.body = ("Failed To Delete")
        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        finally:
            cursor.close()
            database_connection.close()
    def on_post(self, req, resp, login_id, session):
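        # Upload profile documents posted as 'documentUploader' to the
        # 's3-bucket-profiledoc' bucket under the caller's login_id prefix and
        # insert a profile_documents row for each file.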
        # Authenticate login id and session availability.
        try:
            if (MemcacheFunctions.IsSessionValid(login_id, session) is False):
                resp.status = falcon.HTTP_401
                Error = {
                    "Reason": "Invalid Login Credentials or Session is Expired"
                }
                result_json = json.dumps(Error)
                resp.body = result_json
                return
        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as ex:
            raise falcon.HTTPError(falcon.HTTP_400, 'Error', ex.args)

        try:
            database_connection = get_db_connection()
            cursor = database_connection.cursor()
            for item in req.params:
                if item == 'documentUploader':
                    fileitem = req.get_param(item)
                    if (fileitem.file):
                        current_datetime = time.strftime("%y%m%d-%H%M%S")
                        append_to_new_file_name = os.path.basename(
                            fileitem.filename)
                        file_name_without_ext = os.path.splitext(
                            append_to_new_file_name)[0]
                        extension = os.path.splitext(
                            append_to_new_file_name)[1]
                        new_file_name = file_name_without_ext + '_' + current_datetime + extension
                        path = "%s/%s" % (login_id, new_file_name)
                        cursor.execute(
                            "select config_value from config where config_key = 's3-bucket-profiledoc' "
                        )
                        bucket = cursor.fetchone()
                        BUCKET_NAME = bucket['config_value']
                        s3 = aws_cred.aws_s3(BUCKET_NAME)
                        s3.Bucket(BUCKET_NAME).put_object(Key=path,
                                                          Body=fileitem.file,
                                                          ACL='public-read')
                        args = (new_file_name, login_id)
                        cursor.execute(
                            "insert into profile_documents(file_name,login_id) \
                                        values(%s,%s)", args)
                        database_connection.commit()
            if cursor.rowcount == 1:
                resp.status = falcon.HTTP_200
                resp.body = ("200")
            else:
                resp.status = falcon.HTTP_204
                resp.body = ("204")

        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        finally:
            cursor.close()
            database_connection.close()
    def on_get(self, req, resp, login_id, session, order_id):
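        # Return a download link for an order's custom documents: multiple files
        # are pulled from S3, zipped locally, and served from the app's serving
        # URL; a single file is linked directly from S3.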
        try:
            # Authenticate login id and session availability.
            if (MemcacheFunctions.IsSessionValid(login_id, session) is False):
                resp.status = falcon.HTTP_401
                Error = {
                    "Reason": "Invalid Login Credentials or Session is Expired"
                }
                result_json = json.dumps(Error)
                resp.body = result_json
                return
        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)

        try:
            database_connection = get_db_connection()
            cursor = database_connection.cursor()
            cursor.execute(
                "select file_name from ship_custom_documents where order_id = %s",
                (order_id,))
            row = cursor.fetchall()
            filename = row
            cursor.execute(
                "select config_value from config where config_key = 's3-bucket-customdoc'"
            )
            bucket = cursor.fetchone()
            BUCKET_NAME = bucket['config_value']
            s3 = aws_cred.aws_s3(BUCKET_NAME)
            config_path = {}
            # Getting Absolute Path Of Quotation's from DB
            # Tools.py - Contains GetCustomFilePath() to extract Paths from config table
            config_path = Tools.GetCustomFilePath()
            # os.chdir(config_path['custom_files'])
            # Changing Working Directory to Custom Path
            sys.path.append(config_path['custom_files'])
            if (len(filename) > 1):
                cursor.execute(
                    "select config_value from config where config_key = 'serving-url-link'"
                )
                row = cursor.fetchall()
                # Getting File-Name
                serve = row[0]['config_value']
                os.mkdir(os.path.join(config_path['custom_files'], order_id))
                for x in filename:
                    print(x)
                    s3.Object(BUCKET_NAME, '%s/%s' %
                              (order_id, x['file_name'])).download_file(
                                  os.path.join(config_path['custom_files'],
                                               order_id, x['file_name']))
                # Joining Custom Path with OrderID for filname.zip
                zipfilepath1 = os.path.join(config_path['custom_files'],
                                            'temp', order_id)
                zipfilepath = zipfilepath1 + ".zip"
                # Writing Files into ZIP
                with zipfile.ZipFile(zipfilepath, 'w') as myzip:
                    for f in filename:
                        os.chdir(
                            os.path.join(config_path['custom_files'],
                                         order_id))
                        myzip.write(f['file_name'])
                # Joining ServeURL , API, and Filename
                # For API_PATH Declaration Please Refer IntugloAPP
                # http://localhost:8000/custom*
                #        *SERVE_URL*   *API_PATH*
                result_json = json.dumps(
                    os.path.join(serve, 'custom_zip', order_id + '.zip'))
                print(result_json)
                resp.status = falcon.HTTP_200
                resp.body = result_json
            elif (len(filename) == 1):
                cursor.execute(
                    "select config_value from config where config_key = 'serving-url-s3'"
                )
                row = cursor.fetchall()
                # Getting File-Name
                serve = row[0]['config_value']
                # Joining ServeURL , API, and Filename
                # For API_PATH Declaration Please Refer IntugloAPP
                # http://localhost:8000/custom*
                #        *SERVE_URL*   *API_PATH*
                result_json = json.dumps(
                    os.path.join(*[
                        serve, BUCKET_NAME, order_id, filename[0]['file_name']
                    ]))
                print(result_json)
                resp.status = falcon.HTTP_200
                resp.body = result_json
            else:
                resp.status = falcon.HTTP_204
                resp.body = ("204")
        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except pymysql.IntegrityError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        finally:
            cursor.close()
            database_connection.close()
            shutil.rmtree(os.path.join(config_path['custom_files'], order_id),
                          ignore_errors=True)
    def on_post(self, req, resp):
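        # Upload a profile picture posted as 'profileUploader' to the
        # 's3-bucket-profilepic' bucket, delete the previous non-default logo
        # from S3, and update the logo column on the table matching userType.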

        try:
            user_id = req.get_param('login_id')
            user_type = req.get_param('userType')
            upload_count = 0
            list_of_uploads = 0
            for item in req.params:
                if item == 'profileUploader':
                    print("Entering IF")
                    print(user_type)
                    list_of_uploads += 1
                    fileitem = req.get_param(item)
                    if (fileitem.file):
                        fn = os.path.basename(fileitem.filename)
                        file_name_without_ext = os.path.splitext(fn)[0]
                        extension = os.path.splitext(fn)[1]
                        print(os.path.splitext(fn))
                        database_connection = get_db_connection()
                        cursor = database_connection.cursor()
                        cursor.execute(
                            "select config_value from config where config_key = 's3-bucket-profilepic' "
                        )
                        bucket = cursor.fetchone()
                        BUCKET_NAME = bucket['config_value']
                        s3 = aws_cred.aws_s3(BUCKET_NAME)
                        # cursor.execute("select merchant_id from logins where login_id= '"+str(user_id)+"'")
                        # merchant = cursor.fetchone()
                        # new_file_name = merchant['merchant_id']
                        current_datetime = time.strftime("%Y%m%d%H%M%S")
                        append_to_new_file_name = user_id + current_datetime + extension

                        if user_type == '1':
                            print("inside supplier")
                            cursor.execute(
                                "select logo from logistic_providers where login_id = %s",
                                (str(user_id),))
                            logo = cursor.fetchone()
                        elif user_type == '2':
                            print("inside customer")
                            cursor.execute(
                                "select logo from customers where login_id = %s",
                                (str(user_id),))
                            logo = cursor.fetchone()
                        elif user_type == '3':
                            print("inside custom agent")
                            cursor.execute(
                                "select logo from custom_agent where login_id = %s",
                                (str(user_id),))
                            logo = cursor.fetchone()

                        # Remove the old logo from S3 unless it is the default,
                        # then upload the new picture either way.
                        if logo['logo'] != 'default.jpg':
                            obj = s3.Object(BUCKET_NAME,
                                            os.path.join(logo['logo']))
                            obj.delete()
                        s3.Bucket(BUCKET_NAME).put_object(
                            Key=append_to_new_file_name,
                            Body=fileitem.file,
                            ACL='public-read')

                        if user_type == '1':
                            print("inside supplier")
                            cursor.execute(
                                "update logistic_providers set logo = %s "
                                "where login_id = %s",
                                (append_to_new_file_name, user_id))
                            database_connection.commit()
                        elif user_type == '2':
                            print("inside customer")
                            cursor.execute(
                                "update customers set logo = %s "
                                "where login_id = %s",
                                (append_to_new_file_name, user_id))
                            database_connection.commit()
                        elif user_type == '3':
                            print("inside custom agent")
                            cursor.execute(
                                "update custom_agent set logo = %s "
                                "where login_id = %s",
                                (append_to_new_file_name, user_id))
                            database_connection.commit()

            if cursor.rowcount > 0:
                resp.status = falcon.HTTP_200
                message = {'Status': 'Profile Picture uploaded successfully'}
                result_json = json.dumps(message)
                resp.body = (result_json)
            else:
                resp.status = falcon.HTTP_400
                message = {'Status': 'Failed to upload profile picture'}
                result_json = json.dumps(message)
                resp.body = (result_json)

        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        finally:
            cursor.close()
            database_connection.close()
    def on_patch(self, req, resp, login_id, session):
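        # Remove a quotation's uploaded file: deletes the object from the
        # 's3-bucket-quotation' bucket and clears file_name on the
        # ship_quotations row.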
        # Authenticate login id and session availability.
        try:
            if (MemcacheFunctions.IsSessionValid(login_id, session) is False):
                resp.status = falcon.HTTP_401
                Error = {
                    "Reason": "Invalid Login Credentials or Session is Expired"
                }
                result_json = json.dumps(Error)
                resp.body = result_json
                return
        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400, 'Error', err.args)

        try:
            raw_json = req.stream.read(req.content_length or 0).decode('utf-8')
            resultdict_json = json.loads(raw_json,
                                         object_pairs_hook=OrderedDict)
            # Converting Json to List
            list_values = [v for v in resultdict_json.values()]
            quotation_id = list_values[0]
            database_connection = get_db_connection()
            cursor = database_connection.cursor()
            cursor.execute(
                "select config_value from config where config_key = 's3-bucket-quotation' "
            )
            bucket = cursor.fetchone()
            BUCKET_NAME = bucket['config_value']
            s3 = aws_cred.aws_s3(BUCKET_NAME)
            cursor.execute(
                "select file_name from ship_quotations where quotation_id = %s",
                (quotation_id,))
            row = cursor.fetchone()
            if (cursor.rowcount > 0):
                obj = s3.Object(BUCKET_NAME,
                                os.path.join(quotation_id, row['file_name']))
                obj.delete()
                cursor.execute(
                    "update ship_quotations set file_name = '' "
                    "where quotation_id = %s", (quotation_id,))
                database_connection.commit()
                resp.status = falcon.HTTP_200
                message = {"Update": "File is removed"}
                result_json = json.dumps(message)
                resp.body = result_json
            else:
                resp.status = falcon.HTTP_204
                message = {"Error": "Quotation is not found"}
                result_json = json.dumps(message)
                resp.body = result_json
        except OSError as err:  ## if failed, report it back to the user ##
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except ValueError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except pymysql.IntegrityError as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        except Exception as err:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   traceback.print_exc(file=sys.stdout),
                                   err.args)
        finally:
            cursor.close()
            database_connection.close()