Example #1
class UserModel(DB.Model):
    """ User model has user related functionality such as saving and retrieving from db"""

    __tablename__ = "user"

    id = DB.Column(DB.Integer, primary_key=True)
    email = DB.Column(DB.String(80))
    password = DB.Column(DB.String(80))

    def __init__(self, email: str, password: str):
        self.email = email
        self.password = password

    def save_to_db(self):
        """ Saves (UserModel) object to database"""
        DB.session.add(self)
        DB.session.commit()

    def to_json(self) -> dict:
        """ returns dict representation of (UserModel)"""
        return {"id": self.id, "email": self.email, "password": self.password}

    @classmethod
    def find_by_email(cls, email: str) -> "UserModel":
        """Gets user with given email from database
        :param email(String) email of the user
        """
        return cls.query.filter_by(email=email).first()

    @classmethod
    def find_by_id(cls, _id: int) -> "UserModel":
        """Gets user with given id from database
        :param _id(int) id of the user
        """
        return cls.query.filter_by(id=_id).first()
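
A minimal usage sketch for the model above, assuming DB is a Flask-SQLAlchemy instance bound to an app and that the code runs inside an application context; the e-mail and password values are placeholders.

# Hypothetical usage, assuming DB = SQLAlchemy(app) and an active app context.
user = UserModel(email="alice@example.com", password="<hashed password>")
user.save_to_db()                                    # INSERT + commit via DB.session

found = UserModel.find_by_email("alice@example.com")
if found is not None:
    print(found.to_json())                           # {'id': ..., 'email': ..., 'password': ...}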
Example #2
 def test_a_models_and_utils(self):
     """
     test OneHundredReport model
     """
     logging.info('\nStart model test..')
     inst = OneHundredReport(**self._data)
     DB.add(inst)
     res = DB.query(OneHundredReport).all()
     model = res.pop()
     self.assertEqual(str(model), '1_1_2010-05-01')
     self.assertDictEqual(model_to_dict(model), {
         'REGN': 1, 'PLAN': 'А', 'NUM_SC': '10207', 'A_P': '2',
         'VR': 23064358, 'VV': 0, 'VITG': 23064358.0,
         'ORA': 0, 'OVA': 0, 'OITGA': 0.0,
         'ORP': 0, 'OVP': 0, 'OITGP': 0.0,
         'IR': 23064358, 'IV': 0, 'IITG': 23064358.0,
         'DT': '2010-05-01', 'PRIZ': 1, 'P_K': 1})
Example #3
def export_data():

    n = ("Beginning export process. Writing files to {}.".format(
        APPLICATION_PATH + EXPORT_FILE_PATH))

    yield ("<p>" + n)

    # db = DataSet('sqlite:///' + DATABASE_PATH)
    db = DataSet(DB.dataset_connection())

    if os.path.isdir(APPLICATION_PATH + EXPORT_FILE_PATH) is False:
        os.makedirs(APPLICATION_PATH + EXPORT_FILE_PATH)

    with db.transaction():

        for table_name in db.tables:

            if not table_name.startswith("page_search"):

                table = db[table_name]
                n = "Exporting table: " + table_name

                yield ('<p>' + n)
                filename = APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' + table_name + '.json'
                table.freeze(format='json', filename=filename)

    db.close()

    n = "Export process ended. <a href='{}'>Click here to continue.</a>".format(
        BASE_URL)

    yield ("<p>" + n)
Example #4
 def test_one_hundred_report_endpoint(self):
     """
     test for one_hundred_report
     """
     logging.info('\nStart endpoint test..')
     DB.add(OneHundredReport(**self._data))
     resp = json.loads(
         self._client.get('/one_hundred_report/1').get_data().decode())
     self.assertListEqual(
         [{'A_P': '2', 'DT': '2010-05-01T00:00:00', 'IITG': 23064358,
           'IR': 23064358.0, 'IV': 0.0, 'NUM_SC': '10207', 'OITGA': 0.0,
           'OITGP': 0.0, 'ORA': 0.0, 'ORP': 0.0, 'OVA': 0.0, 'OVP': 0.0,
           'PLAN': 'А', 'PRIZ': 1, 'P_K': 1, 'REGN': 1, 'VITG': 23064358.0,
           'VR': 23064358.0, 'VV': 0.0},
          {'A_P': '2', 'DT': '2010-05-01T00:00:00', 'IITG': 23064358,
           'IR': 23064358.0, 'IV': 0.0, 'NUM_SC': '10207', 'OITGA': 0.0,
           'OITGP': 0.0, 'ORA': 0.0, 'ORP': 0.0, 'OVA': 0.0, 'OVP': 0.0,
           'PLAN': 'А', 'PRIZ': 1, 'P_K': 2, 'REGN': 1, 'VITG': 23064358.0,
           'VR': 23064358.0, 'VV': 0.0}], resp)
Example #5
def import_data():
    from settings import DB
    n = []
    n.append("Beginning import process.")
    # yield "<p>" + n

    n.append("Cleaning DB.")
    # yield "<p>" + n
    try:
        DB.clean_database()
    except:
        from core.models import init_db
        init_db.recreate_database()
        DB.remove_indexes()

    n.append("Clearing tables.")
    # yield "<p>" + n

    xdb = DataSet(DB.dataset_connection())

    # with xdb.atomic() as txn:
    try:
        with xdb.transaction():
            for table_name in xdb.tables:
                n.append("Loading table " + table_name)
                # yield "<p>" + n
                try:
                    table = xdb[table_name]
                except:
                    n.append("<p>Sorry, couldn't create table ", table_name)
                else:
                    filename = (APPLICATION_PATH + EXPORT_FILE_PATH +
                                '/dump-' + table_name + '.json')
                    if os.path.exists(filename):
                        try:
                            table.thaw(format='json',
                                       filename=filename,
                                       strict=True)
                        except Exception as e:
                            n.append("<p>Sorry, error:{}".format(e))

                    else:
                        n.append("No data for table " + table_name)
                        # yield "<p>" + n
    except Exception as e:
        n.append('Ooops: {}'.format(e))
    else:
        xdb.query(DB.post_import())
        xdb.close()
        DB.recreate_indexes()
        n.append(
            "Import process ended. <a href='{}'>Click here to continue.</a>".
            format(BASE_URL))
    return '<p>'.join(n)
Example #6
 def test_03_query_corp(self):
     Browser.dr.get(self.base_url)
     self.page.search_querydropdownbtn().click()
     self.page.search_querycropcodeinp().send_keys('001')
     self.page.search_selectcropstatusinput().click()
     self.page.crop_status_options()[1].click()
     self.page.serach_querybtn().click()
     with DB() as dbcur:
         sql = "select * from corp where code = '001';"
         count = dbcur.execute(sql)
     li_tr = self.page.query_corptr()
     logger.info(pytest.assume(len(li_tr) == count))
Example #7
def import_data():

    n = ("Beginning import process.")

    yield "<p>" + n

    DB.clean_database()

    xdb = DataSet(DB.dataset_connection())

    xdb.query(DB.pre_import(), commit=False)

    with xdb.transaction() as txn:

        for table_name in xdb.tables:

            xdb.query('DELETE FROM `{}`;'.format(table_name), commit=True)

    with xdb.transaction() as txn:

        for table_name in xdb.tables:

            n = ("Loading table " + table_name)

            yield "<p>" + n

            try:
                table = xdb[table_name]
            except:
                yield ("<p>Sorry, couldn't create table ", table_name)
            else:

                filename = (APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' +
                            table_name + '.json')
                if os.path.exists(filename):

                    table.thaw(format='json', filename=filename, strict=True)

                else:
                    n = ("No data for table " + table_name)
                    yield "<p>" + n

    xdb.query(DB.post_import())

    xdb.close()

    DB.recreate_indexes()

    n = "Import process ended. <a href='{}'>Click here to continue.</a>".format(
        BASE_URL)

    yield "<p>" + n

    from core.routes import app
    app.reset()
Example #8
def opt8():
    ''' Performs a backup of the database '''
    subprocess.call('clear')
    name = input('Backup file name: ')
    dest = input('Save to: ')
    try:
        if dest[-1] != '/':
            dest = dest + '/'
        bckfile = ''.join([dest, name, '-', datetime.today().strftime('%d-%m-%Y-%H-%M-%S'), '.tar'])
        with tarfile.open(bckfile, 'a') as tf:
            tf.add('{0}{1}'.format(INSTALL_PATH, DB), DB.split('/')[-1])
            print('Backup completed successfully.\n{0}'.format(bckfile))
    except:
        print('Failed to generate the backup file.')
Example #9
def site_search_results(request, site=None):

    try:
        search_terms = request.query['search']
    except KeyError:
        raise KeyError('No search field in query.')

    if search_terms == "":
        raise ValueError('Search field is empty.')

    search_terms_enc = utf8_escape(search_terms)
    pages_searched = DB.site_search(search_terms_enc, site)

    return pages_searched, search_terms
Example #11
def parse(arg, DEBUG=False):
    # define DB
    db = DB(DEBUG)

    # load revisions from the database and store them in the format
    # {id1: [revision1, revision2], id2: [rev3], ...}
    revisions = defaultdict(list, get_revisions(db))

    # buffers for accumulating processed data before save
    insertList = []
    updateList = []

    # iterating through generator
    for row in reader(arg):
        uid = row['uid']

        # creating hash from processed record
        # duplicates will have same hash, so we can ignore them
        row_hash = hash_from_dict(row)

        # check if hash in revisions
        if uid in revisions and row_hash in revisions[uid]:
            continue

        # write the hash to our record, so we can extract it during the next parse
        row['revisions'] = [row_hash]

        if uid in revisions:
            updateList.append(row)
        else:
            insertList.append(row)

        # add the hash to revisions, so we can skip further duplicates within the current parse
        revisions[uid].append(row_hash)

        # save data to DB when localstorage reaches chunk size
        if CHANK_SIZE in [len(insertList), len(updateList)]:
            save(db, insertList, updateList)
            insertList.clear()
            updateList.clear()

    # saving the rest of the data (if any) to DB
    save(db, insertList, updateList)
Example #12
def create_tables():
    """create local database if it doesn't exist"""
    if ENV == "local":
        DB.create_all()
Example #13
        result = db.products.update_one({'uid': item['uid']}, query)


def get_revisions(db):
    products = db.products
    if products.count() > 0:
        return dict([(i['uid'], i['revisions']) for i in \
            products.find({}, {'uid': 1, 'revisions': 1, '_id': 0})])
    else:
        return {}


def drop_table(db):
    db.products.drop()


def create_index(db):
    db.products.create_index([('uid', ASCENDING)], unique=True)


# You can use this script for clearing DB and creating indexes
# python db.py 1      - testing DataBase
# python db.py 0      - production DataBase
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("debug")
    args = parser.parse_args()

    arg = False if args.debug in ['0', 0, 'False', 'false', ''] else True
    drop_table(DB(arg))
    create_index(DB(arg))
Example #14
def get_next_pk(sequence_name):
    return DB.command('findAndModify', 'sequences',
        query={'_id': sequence_name}, update={'$inc': {'pk': 1}},
        new=True)['value']['pk']
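
A short sketch of seeding and using the sequence counter above, assuming DB is a pymongo Database object; the sequence name 'invoice' is a placeholder.

# Hypothetical setup/usage, assuming DB is a pymongo Database instance.
DB.sequences.update_one({'_id': 'invoice'},
                        {'$setOnInsert': {'pk': 0}}, upsert=True)  # seed the counter once

pk = get_next_pk('invoice')   # atomically increments and returns the new value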
Example #15
import os
from settings import DB
from lqueries import clear_agg_sql
from lqueries import clear_hex_sql
from lqueries import clear_results_hex_sql
from lqueries import clear_totals_hex_sql
from lqueries import clear_preprocessed_sql
from lqueries import update_hex_seq

# Restrict visible functions
__all__ = ['init']

# LOCAL PATHS
cwd = os.path.dirname(__file__)
scripts_path = os.path.join(cwd, '../scripts/DB')
dbname = DB.split('/')[-1]

# ####################### HELPER FUNCTIONS ################################


def load(arr):
    '''Load CSV files into a Postgres table.
    Expects an array of tuples in the following format:
    ('table_name', 'path', 'csv_fname') or
    ('table_name', 'path'), which assumes csv_fname is the same as the table name.
        path: relative to the data folder
    '''
    cmd = '%(path)s/load_pg_table.sh %(db)s %(table)s %(csv_path)s'
    with lcd(cwd):
        for t in arr:
            abs_path = os.path.join(cwd, '../data', t[1])
Example #16
def db_recreate(table_name='FileInfo'):
    '''
    Utility function for recreating a database table after changes
    '''

    from core import models
    from core.models import db  # peewee database handle used below in db.atomic()
    table_model = getattr(models, table_name)
    table_ref = table_model._meta.db_table

    from settings import DB, EXPORT_FILE_PATH
    from core.libs.playhouse.dataset import DataSet

    n = []

    n.append("Beginning export process... Writing files to {}.".format(APPLICATION_PATH + EXPORT_FILE_PATH))

    xdb = DataSet(DB.dataset_connection())

    if os.path.isdir(APPLICATION_PATH + EXPORT_FILE_PATH) is False:
        os.makedirs(APPLICATION_PATH + EXPORT_FILE_PATH)
    with xdb.transaction():
        table = xdb[table_ref]
        n.append("Exporting table: " + table_name)
        filename = APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' + table_ref + '.json'
        table.freeze(format='json', filename=filename)
    xdb.close()

    n.append("Export process ended.")

    with db.atomic():
        db.drop_tables((table_model,),
            safe=True)
        db.create_tables((table_model,),
            safe=False)

    try:
        with xdb.transaction():
            n.append("Loading table " + table_name)
            try:
                table = xdb[table_ref]
            except:
                n.append("<p>Sorry, couldn't create table ", table_name)
            else:
                filename = (APPLICATION_PATH + EXPORT_FILE_PATH +
                    '/dump-' + table_ref + '.json')
                if os.path.exists(filename):
                    try:
                        table.thaw(format='json',
                            filename=filename,
                            strict=True)
                    except Exception as e:
                        n.append("<p>Sorry, error:{}".format(e))

                else:
                    n.append("No data for table " + table_name)
    except Exception as e:
        n.append('Ooops: {}'.format(e))
    else:
        xdb.query(DB.post_import())
        xdb.close()
        # DB.recreate_indexes()
        n.append("Import process ended.")

    return ('<p>'.join(n))
Example #17
from core.error import LoggedException
from functools import wraps
from core.libs.bottle import request
from core.libs.peewee import OperationalError
from core.models import db
from settings import DB, DATABASE_TIMEOUT, RETRY_INTERVAL
from sys import exc_info
from time import clock, sleep

DBError, error_text = DB.db_warnings()

# what we might want to do is instead subclass transaction
# so we can use it without the decorator context


def transaction(func):
    @wraps(func)
    def wrapper(*a, **ka):
        db.connect()
        db.execute_sql('PRAGMA journal_mode=WAL;')
        db.execute_sql('PRAGMA busy_timeout=30000;')

        start = clock()
        while 1:
            try:
                with db.transaction():
                    fn = func(*a, **ka)
            except LoggedException as e:
                db.rollback()
                raise exc_info()[0](e.msg)
            except DBError as e:
Example #18
_init_logging(CONSOLE_FORMAT, CONSOLE_DATE_FORMAT, LOGGING_LEVEL)
_configure_app(APP_CONFIGURATION)

logger = logging.getLogger(__name__)
api = Api(app)
jwt = JWTManager(app)


@jwt.expired_token_loader
def expired_token_handler_callback(expired_token):
    """set response status and body for when jwt token expires"""
    token_type = expired_token["type"]
    return jsonify({
        "status": 401,
        "message": f"{token_type}_token_expired"
    }), 401


# routes
api.add_resource(RegisterUser, "/register")
api.add_resource(LoginUser, "/authenticate")

api.add_resource(GetTodos, "/todos")
api.add_resource(PostTodos, "/todo")
api.add_resource(ModifyTodos, "/todo/<string:todo_id>")

if __name__ == "__main__":
    debug = ENV != "prod"
    DB.init_app(app)
    app.run(port=PORT, debug=debug)
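
A client-side sketch for exercising the routes registered above, assuming the app runs locally on port 5000 and that the register/authenticate resources accept JSON credentials and return a JWT under "access_token"; those field names are assumptions, not confirmed by the source.

# Hypothetical client calls against the resources wired up above.
import requests

BASE = "http://localhost:5000"
requests.post(f"{BASE}/register", json={"email": "user@example.com", "password": "secret"})
auth = requests.post(f"{BASE}/authenticate",
                     json={"email": "user@example.com", "password": "secret"}).json()
headers = {"Authorization": "Bearer {}".format(auth.get("access_token"))}
print(requests.get(f"{BASE}/todos", headers=headers).status_code)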
Example #19
from core.error import LoggedException
from functools import wraps
from core.libs.bottle import request
from core.libs.peewee import OperationalError
from core.models import db
from settings import DB, DATABASE_TIMEOUT, RETRY_INTERVAL
from sys import exc_info
from time import clock, sleep

DBError, error_text = DB.db_warnings()

# what we might want to do is instead subclass transaction
# so we can use it without the decorator context

def transaction(func):
    @wraps(func)
    def wrapper(*a, **ka):
        db.connect()
        db.execute_sql('PRAGMA journal_mode=WAL;')
        db.execute_sql('PRAGMA busy_timeout=30000;')

        start = clock()
        while 1:
            try:
                with db.transaction():
                    fn = func(*a, **ka)
            except LoggedException as e:
                db.rollback()
                raise exc_info()[0](e.msg)
            except DBError as e:
                db.rollback()
Example #20
class TodoModel(DB.Model):
    """Todo model has all todo related functionality such as saving and retrieving from db"""

    __tablename__ = "todo"

    id = DB.Column(DB.Integer, primary_key=True)
    title = DB.Column(DB.String(256), nullable=False)
    desc = DB.Column(DB.String(256), nullable=True)
    status = DB.Column(DB.Boolean, nullable=False)
    created_at = DB.Column(DB.DateTime, default=datetime.utcnow)  # pass the callable so the timestamp is set per row
    updated_at = DB.Column(DB.DateTime)
    owner = DB.Column(DB.Integer, DB.ForeignKey("user.id"))
    user = DB.relationship("UserModel")

    def __init__(self, title: str, desc: str, status: bool, owner: int):
        self.title = title
        self.desc = desc
        self.status = status
        self.owner = owner

    def to_json(self) -> dict:
        """ returns dict representation of the object"""
        return {
            "id": self.id,
            "title": self.title,
            "desc": self.desc,
            "status": self.status,
            "owner": self.owner,
        }

    def save_to_db(self):
        """Save (TodoModel) into db"""
        DB.session.add(self)
        DB.session.commit()

    def delete_from_db(self):
        """Delete (TodoModel) from db"""
        DB.session.delete(self)
        DB.session.commit()

    @classmethod
    def find_by_user(cls, user_id: str) -> "TodoModel":
        """return all todo items where owner is user_id"""
        return cls.query.filter_by(owner=user_id).all()

    @classmethod
    def find_by_id(cls, _id: int) -> "TodoModel":
        """returns one todo by id"""
        return cls.query.filter_by(id=_id).first()
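
As with UserModel, a minimal usage sketch, assuming the same Flask-SQLAlchemy DB instance, an active application context, and an existing user with id 1 as the owner; the values are placeholders.

# Hypothetical usage of TodoModel.
todo = TodoModel(title="Write docs", desc="Draft the README", status=False, owner=1)
todo.save_to_db()

for item in TodoModel.find_by_user(1):      # all todos owned by user 1
    print(item.to_json())

TodoModel.find_by_id(todo.id).delete_from_db()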
Example #21
def step_4_pre():

    if get_ini('main', 'DO_DB_CHECK') is None:
        store_ini('main', 'DO_DB_CHECK', 'Y')
        from core.utils import reboot
        reboot()

    report = []

    from core.models import db, Template
    try:
        db.connect()
    except:
        raise

    db.close()

    report.append("Database connection successful.")

    from settings import DB
    DB.recreate_database()

    report.append("Database tables created successfully.")

    username = "******"
    email = get_ini("user", "email")
    password = get_ini("user", "password")
    blog_path = get_ini("install", "blog_path")

    from core.utils import encrypt_password
    p_key = get_ini('key', 'PASSWORD_KEY')
    password = encrypt_password(password, p_key)

    db.connect()

    with db.atomic():

        from core.models import Site
        new_site = Site.create(
            name="Your first site",
            description="The description for your first site.",
            url=get_ini('main', 'base_url_root'),
            path=blog_path)

        report.append("Initial site created successfully.")

        from core.models import User
        new_user = User(
            name='Administrator',
            email=email,
            encrypted_password=password)

        new_user.save_pwd()

        from core.auth import role

        new_user_permissions = new_user.add_permission(
            permission=role.SYS_ADMIN,
            site=new_site
            )

        new_user_permissions.save()

        report.append("Initial admin user created successfully.")

        plugindir = _join((_s.APPLICATION_PATH, 'data', 'plugins'))

        import shutil

        # TODO: warn on doing this?
        # this should only happen with a totally fresh install, not an upgrade

        install_directory = _join((_s.APPLICATION_PATH, _s.INSTALL_SRC_PATH))

        if (os.path.isdir(plugindir)):
            shutil.rmtree(plugindir)

        shutil.copytree(_join((install_directory, 'plugins')),
            plugindir)

        report.append("Default plugins copied successfully to data directory.")

        themedir = _join((_s.APPLICATION_PATH, 'data', 'themes'))

        if (os.path.isdir(themedir)):
            shutil.rmtree(themedir)

        shutil.copytree(_join((install_directory, 'themes')),
            themedir)

        report.append("Default themes copied successfully to data directory.")

        from core import plugins

        for x in os.listdir(plugindir):
            if (os.path.isdir(_join((plugindir, x))) is True and
                x != '__pycache__'):
                new_plugin = plugins.register_plugin(x, enable=True)
                report.append("New plugin '{}' installed successfully.".format(
                    new_plugin.name))

        from settings.defaults import DEFAULT_THEME
        from core.models import Theme
        new_theme = Theme.install_to_system(DEFAULT_THEME)

        report.append("Default theme created and installed successfully to system.")

        from core.models import Blog

        new_blog = Blog(
            site=new_site,
            name="Your first blog",
            description="The description for your first blog.",
            url=new_site.url,
            path=new_site.path,
            local_path=new_site.path,
            theme=new_theme
            )

        # TODO: add blog-level permission for new user as well

        new_blog.setup(new_user, new_theme)

        # TODO: Add default post

        report.append("Initial blog created successfully with default theme.")

    db.close()

    output_file_name = _join((_s.APPLICATION_PATH + _s.DATA_FILE_PATH, _s.INI_FILE_NAME))

    config_parser = ConfigParser()

    sections = ('db', 'path', 'key')

    for s in sections:
        for name, value in parser.items(s):
            try:
                config_parser.add_section(s)
            except DuplicateSectionError:
                pass
            config_parser.set(s, name, value)

#     if request.environ['HTTP_HOST'] == _s.DEFAULT_LOCAL_ADDRESS + _s.DEFAULT_LOCAL_PORT:
#         config_parser.add_section('server')
#         config_parser.set('server', 'DESKTOP_MODE', 'True')

    try:
        with open(output_file_name, "w", encoding='utf-8') as output_file:
            config_parser.write(output_file)
    except BaseException as e:
        raise SetupError(str(e.__class__.__name__) + ": " + str(e))

    try:
        os.remove(config_file_name)
    except OSError as e:
        from core.error import not_found
        if not_found(e) is False:
            raise e
    except Exception as e:
        raise e

    finished = '''
    <p>Installation is complete. <a href="{}">Return to the main page to begin using the application.</a>
    <script>
    $.get('/reboot',function(data){{}});
    </script>
    '''.format(_s.BASE_URL)

    return {'report':report,
        'finished':finished}
Example #22
def db_recreate(table_name='FileInfo'):
    '''
    Utility function for recreating a database table after changes
    '''

    from core import models
    from core.models import db  # peewee database handle used below in db.atomic()
    table_model = getattr(models, table_name)
    table_ref = table_model._meta.db_table

    from settings import DB, EXPORT_FILE_PATH
    from core.libs.playhouse.dataset import DataSet

    n = []

    n.append("Beginning export process... Writing files to {}.".format(
        APPLICATION_PATH + EXPORT_FILE_PATH))

    xdb = DataSet(DB.dataset_connection())

    if os.path.isdir(APPLICATION_PATH + EXPORT_FILE_PATH) is False:
        os.makedirs(APPLICATION_PATH + EXPORT_FILE_PATH)
    with xdb.transaction():
        table = xdb[table_ref]
        n.append("Exporting table: " + table_name)
        filename = APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' + table_ref + '.json'
        table.freeze(format='json', filename=filename)
    xdb.close()

    n.append("Export process ended.")

    with db.atomic():
        db.drop_tables((table_model, ), safe=True)
        db.create_tables((table_model, ), safe=False)

    try:
        with xdb.transaction():
            n.append("Loading table " + table_name)
            try:
                table = xdb[table_ref]
            except:
                n.append("<p>Sorry, couldn't create table ", table_name)
            else:
                filename = (APPLICATION_PATH + EXPORT_FILE_PATH + '/dump-' +
                            table_ref + '.json')
                if os.path.exists(filename):
                    try:
                        table.thaw(format='json',
                                   filename=filename,
                                   strict=True)
                    except Exception as e:
                        n.append("<p>Sorry, error:{}".format(e))

                else:
                    n.append("No data for table " + table_name)
    except Exception as e:
        n.append('Ooops: {}'.format(e))
    else:
        xdb.query(DB.post_import())
        xdb.close()
        # DB.recreate_indexes()
        n.append("Import process ended.")

    return ('<p>'.join(n))
Example #23
def step_4_pre():

    if get_ini('main', 'DO_DB_CHECK') is None:
        store_ini('main', 'DO_DB_CHECK', 'Y')
        from core.utils import reboot
        reboot()

    report = []

    from core.models import db, Template
    try:
        db.connect()
    except:
        raise

    db.close()

    report.append("Database connection successful.")

    from settings import DB
    DB.recreate_database()

    report.append("Database tables created successfully.")

    username = "******"
    email = get_ini("user", "email")
    password = get_ini("user", "password")
    blog_path = get_ini("install", "blog_path")

    from core.utils import encrypt_password
    p_key = get_ini('key', 'PASSWORD_KEY')
    password = encrypt_password(password, p_key)

    db.connect()

    with db.atomic():

        from core.models import Site
        new_site = Site.create(
            name="Your first site",
            description="The description for your first site.",
            url=get_ini('main', 'base_url_root'),
            path=blog_path)

        report.append("Initial site created successfully.")

        from core.models import User
        new_user = User(name='Administrator',
                        email=email,
                        encrypted_password=password)

        new_user.save_pwd()

        from core.auth import role

        new_user_permissions = new_user.add_permission(
            permission=role.SYS_ADMIN, site=new_site)

        new_user_permissions.save()

        report.append("Initial admin user created successfully.")

        plugindir = _join((_s.APPLICATION_PATH, 'data', 'plugins'))

        import shutil

        # TODO: warn on doing this?
        # this should only happen with a totally fresh install, not an upgrade

        install_directory = _join((_s.APPLICATION_PATH, _s.INSTALL_SRC_PATH))

        if (os.path.isdir(plugindir)):
            shutil.rmtree(plugindir)

        shutil.copytree(_join((install_directory, 'plugins')), plugindir)

        report.append("Default plugins copied successfully to data directory.")

        themedir = _join((_s.APPLICATION_PATH, 'data', 'themes'))

        if (os.path.isdir(themedir)):
            shutil.rmtree(themedir)

        shutil.copytree(_join((install_directory, 'themes')), themedir)

        report.append("Default themes copied successfully to data directory.")

        from core import plugins

        for x in os.listdir(plugindir):
            if (os.path.isdir(_join((plugindir, x))) is True
                    and x != '__pycache__'):
                new_plugin = plugins.register_plugin(x, enable=True)
                report.append("New plugin '{}' installed successfully.".format(
                    new_plugin.name))

        from settings.defaults import DEFAULT_THEME
        from core.models import Theme
        new_theme = Theme.install_to_system(DEFAULT_THEME)

        report.append(
            "Default theme created and installed successfully to system.")

        from core.models import Blog

        new_blog = Blog(site=new_site,
                        name="Your first blog",
                        description="The description for your first blog.",
                        url=new_site.url,
                        path=new_site.path,
                        local_path=new_site.path,
                        theme=new_theme)

        # TODO: add blog-level permission for new user as well

        new_blog.setup(new_user, new_theme)

        # TODO: Add default post

        report.append("Initial blog created successfully with default theme.")

    db.close()

    output_file_name = _join(
        (_s.APPLICATION_PATH + _s.DATA_FILE_PATH, _s.INI_FILE_NAME))

    config_parser = ConfigParser()

    sections = ('db', 'path', 'key')

    for s in sections:
        for name, value in parser.items(s):
            try:
                config_parser.add_section(s)
            except DuplicateSectionError:
                pass
            config_parser.set(s, name, value)


#     if request.environ['HTTP_HOST'] == _s.DEFAULT_LOCAL_ADDRESS + _s.DEFAULT_LOCAL_PORT:
#         config_parser.add_section('server')
#         config_parser.set('server', 'DESKTOP_MODE', 'True')

    try:
        with open(output_file_name, "w", encoding='utf-8') as output_file:
            config_parser.write(output_file)
    except BaseException as e:
        raise SetupError(str(e.__class__.__name__) + ": " + str(e))

    try:
        os.remove(config_file_name)
    except OSError as e:
        from core.error import not_found
        if not_found(e) is False:
            raise e
    except Exception as e:
        raise e

    finished = '''
    <p>Installation is complete. <a href="{}">Return to the main page to begin using the application.</a>
    <script>
    $.get('/reboot',function(data){{}});
    </script>
    '''.format(_s.BASE_URL)

    return {'report': report, 'finished': finished}
Example #24
    with open_workbook(fname) as wb:
        s = wb.sheet_by_index(2)
        for row in range(1, s.nrows):
            SubscriptionArticle.get_or_create(
                subscription=s.cell(row, 0).value,
                article=s.cell(row, 1).value
            )
    return


if __name__ == '__main__':
    print('Using PRODUCTION database? %s' % SETTINGS['PRODUCTION_FLAG'])
    if len(sys.argv) > 1 and sys.argv[1] == '--nointeraction':
        pass
    else:
        r = input(
            'Deleting and initializing tables, press Enter to continue...\n')
        if r:
            print('Aborted')
            sys.exit(1)
    tables = [Subscription, Article, SubscriptionArticle]
    create_tables(tables, reset=True)

    # abs_path = os.path.abspath(__file__)
    # cur_dir = os.path.dirname(abs_path)
    # test_url = os.path.join(cur_dir, 'test/data/test_url.xlsx')
    # feed_subscription_url_from_xml(test_url)
    # feed_article_url_from_xls(test_url)
    # feed_subscription_article_from_xls(test_url)
    POSTGRES_DB.close()