def backup_to_dropbox():
    """Upload the newest database dump and any new public files to Dropbox.

    Files already present on Dropbox with the same name and size are skipped.
    """
    from dropbox import client, session
    from conf import dropbox_access_key, dropbox_secret_key
    from webnotes.utils.backups import new_backup

    if not webnotes.conn:
        webnotes.connect()

    sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")
    sess.set_token(
        webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
        webnotes.conn.get_value("Backup Manager", None, "dropbox_access_secret"))
    dropbox_client = client.DropboxClient(sess)

    # upload the database dump first
    backup = new_backup()
    db_dump = os.path.join(get_base_path(), "public", "backups",
        os.path.basename(backup.backup_path_db))
    upload_file_to_dropbox(db_dump, "database", dropbox_client)

    response = dropbox_client.metadata("/files")

    # upload public files not already mirrored (matched on basename + size)
    files_dir = os.path.join(get_base_path(), "public", "files")
    for fname in os.listdir(files_dir):
        fpath = os.path.join(files_dir, fname)
        already_uploaded = False
        for file_metadata in response["contents"]:
            same_name = os.path.basename(fpath) == os.path.basename(file_metadata["path"])
            if same_name and os.stat(fpath).st_size == int(file_metadata["bytes"]):
                already_uploaded = True
                break
        if not already_uploaded:
            upload_file_to_dropbox(fpath, "files", dropbox_client)
def execute():
    """Patch: delete the stale compiled startup/website.pyc once website.py is gone."""
    from webnotes.utils import get_base_path
    startup_dir = os.path.join(get_base_path(), "app", "startup")
    website_py = os.path.join(startup_dir, "website.py")
    website_pyc = os.path.join(startup_dir, "website.pyc")
    # only the orphaned .pyc (source removed) is deleted
    if os.path.exists(website_pyc) and not os.path.exists(website_py):
        os.remove(website_pyc)
def backup_to_gdrive(): from webnotes.utils.backups import new_backup if not webnotes.conn: webnotes.connect() get_gdrive_flow() credentials_json = webnotes.conn.get_value("Backup Manager", None, "gdrive_credentials") credentials = oauth2client.client.Credentials.new_from_json( credentials_json) http = httplib2.Http() http = credentials.authorize(http) drive_service = build('drive', 'v2', http=http) # upload database backup = new_backup() path = os.path.join(get_base_path(), "public", "backups") filename = os.path.join(path, os.path.basename(backup.backup_path_db)) # upload files to database folder upload_files( filename, 'application/x-gzip', drive_service, webnotes.conn.get_value("Backup Manager", None, "database_folder_id")) # upload files to files folder did_not_upload = [] error_log = [] files_folder_id = webnotes.conn.get_value("Backup Manager", None, "files_folder_id") webnotes.conn.close() path = os.path.join(get_base_path(), "public", "files") for filename in os.listdir(path): filename = cstr(filename) found = False filepath = os.path.join(path, filename) ext = filename.split('.')[-1] size = os.path.getsize(filepath) if ext == 'gz' or ext == 'gzip': mimetype = 'application/x-gzip' else: mimetype = mimetypes.types_map.get( "." + ext) or "application/octet-stream" #Compare Local File with Server File children = drive_service.children().list( folderId=files_folder_id).execute() for child in children.get('items', []): file = drive_service.files().get(fileId=child['id']).execute() if filename == file['title'] and size == int(file['fileSize']): found = True break if not found: try: upload_files(filepath, mimetype, drive_service, files_folder_id) except Exception, e: did_not_upload.append(filename) error_log.append(cstr(e))
def on_update(self):
    """On save: bootstrap the sites area and this site if missing, then refresh
    global defaults and notify the user."""
    webnotes.errprint("hiii")
    sites_dir = os.path.join(get_base_path(), "sites")
    if not os.path.exists(sites_dir):
        self.make_primary_sites_settings()
    if not os.path.exists(os.path.join(sites_dir, self.doc.site_name)):
        self.create_new_site()
    self.update_global_defaults()
    webnotes.msgprint("Updated")
def on_update(self):
    """On save: ensure the sites directory exists and this site's folder is created."""
    sites_dir = os.path.join(get_base_path(), "sites")
    if not os.path.exists(sites_dir):
        self.make_primary_sites_settings()
    if not os.path.exists(os.path.join(sites_dir, self.doc.site_name)):
        self.create_new_site()
def update_db_name_pwd(self):
    """Read the site's db name and password out of its site_config.json and
    store them on the matching `tabSite Details` row.

    BUGFIX: removed a dead leading `os.path.join(...)` expression statement
    whose result was discarded.
    """
    config_path = os.path.join(get_base_path(), "sites", self.doc.site_name,
        'site_config.json')
    with open(config_path, 'r') as site_config:
        lines = site_config.readlines()
        # crude positional parse of the JSON: line 1 = db name, line 2 = password
        db_name = lines[1].split(':')[1].replace('"', '')[:-3]
        db_pwd = lines[2].split(':')[1].replace('"', '')[:-1]
    # NOTE(review): string-formatted SQL; values come from a locally generated
    # config file, but parameterized queries would be safer.
    webnotes.conn.sql("update `tabSite Details` set database_name = LTRIM('%s'), database_password = LTRIM('%s') where name = '%s' " % (db_name, db_pwd, self.doc.name))
    webnotes.conn.sql("commit")
def execute():
    """Patch: drop the compiled selling/utils.pyc and the obsolete app/website tree."""
    # remove stale compiled file
    utils_pyc = os.path.join(get_base_path(), "app", "selling", "utils.pyc")
    if os.path.exists(utils_pyc):
        os.remove(utils_pyc)
    # remove the retired website module folder
    old_path = os.path.join(get_base_path(), "app", "website")
    if os.path.exists(old_path):
        shutil.rmtree(old_path)
def create_new_site(self):
    """Install a fresh site via wnf.py, register it in /etc/hosts, rebuild
    assets, then persist its db credentials."""
    root_password = webnotes.conn.get_value("Global Defaults", None, "mysql_root_password")
    # db name derives from the site name with dots replaced for MySQL
    exec_in_shell(
        """{path}/lib/wnf.py --install {dbname} --root-password {root_password} --site {name} """.format(
            path=get_base_path(),
            dbname=self.doc.site_name.replace('.', '_'),
            root_password=root_password,
            name=self.doc.site_name))
    self.add_to_hosts()
    exec_in_shell("{path}/lib/wnf.py --build".format(path=get_base_path()))
    self.update_db_name_pwd()
def backup_to_dropbox():
    """Upload the latest database dump and any changed public files to Dropbox.

    Returns (did_not_upload, error_log): names that failed and deduplicated
    tracebacks. Reconnects to the database before returning.
    """
    from dropbox import client, session
    from conf import dropbox_access_key, dropbox_secret_key
    from webnotes.utils.backups import new_backup

    if not webnotes.conn:
        webnotes.connect()

    sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")
    sess.set_token(
        webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
        webnotes.conn.get_value("Backup Manager", None, "dropbox_access_secret"))
    dropbox_client = client.DropboxClient(sess)

    # database dump first
    backup = new_backup()
    db_dump = os.path.join(get_base_path(), "public", "backups",
        os.path.basename(backup.backup_path_db))
    upload_file_to_dropbox(db_dump, "/database", dropbox_client)
    webnotes.conn.close()

    response = dropbox_client.metadata("/files")

    did_not_upload = []
    error_log = []
    files_dir = os.path.join(get_base_path(), "public", "files")
    for filename in os.listdir(files_dir):
        filename = cstr(filename)
        if filename in ignore_list:
            continue
        filepath = os.path.join(files_dir, filename)
        # skip files already mirrored with an identical size
        found = False
        for file_metadata in response["contents"]:
            if (os.path.basename(filepath) == os.path.basename(file_metadata["path"])
                    and os.stat(filepath).st_size == int(file_metadata["bytes"])):
                found = True
                break
        if not found:
            try:
                upload_file_to_dropbox(filepath, "/files", dropbox_client)
            except Exception:
                did_not_upload.append(filename)
                error_log.append(webnotes.getTraceback())

    webnotes.connect()
    return did_not_upload, list(set(error_log))
def backup_to_gdrive(): from webnotes.utils.backups import new_backup if not webnotes.conn: webnotes.connect() get_gdrive_flow() credentials_json = webnotes.conn.get_value("Backup Manager", None, "gdrive_credentials") credentials = oauth2client.client.Credentials.new_from_json(credentials_json) http = httplib2.Http() http = credentials.authorize(http) drive_service = build('drive', 'v2', http=http) # upload database backup = new_backup() path = os.path.join(get_base_path(), "public", "backups") filename = os.path.join(path, os.path.basename(backup.backup_path_db)) # upload files to database folder upload_files(filename, 'application/x-gzip', drive_service, webnotes.conn.get_value("Backup Manager", None, "database_folder_id")) # upload files to files folder did_not_upload = [] error_log = [] files_folder_id = webnotes.conn.get_value("Backup Manager", None, "files_folder_id") webnotes.conn.close() path = os.path.join(get_base_path(), "public", "files") for filename in os.listdir(path): found = False filepath = os.path.join(path, filename) ext = filename.split('.')[-1] size = os.path.getsize(filepath) if ext == 'gz' or ext == 'gzip': mimetype = 'application/x-gzip' else: mimetype = mimetypes.types_map.get("." + ext) or "application/octet-stream" #Compare Local File with Server File param = {} children = drive_service.children().list(folderId=files_folder_id, **param).execute() for child in children.get('items', []): file = drive_service.files().get(fileId=child['id']).execute() if filename == file['title'] and size == int(file['fileSize']): found = True break if not found: try: upload_files(filepath, mimetype, drive_service, files_folder_id) except Exception, e: did_not_upload.append(filename) error_log.append(cstr(e))
def initiate_tenant_ctreation(self, root_password, site_name, is_parent=False):
    """Install a tenant site and record its credentials.

    Name kept as-is for callers (the 'ctreation' typo is historical).
    Parent tenants update `tabSite Details`; children update their own record.
    """
    # db name is capped at 16 characters for MySQL and de-dotted
    exec_in_shell(
        """{path}/lib/wnf.py --install {dbname} --root-password {root_password} --site {name} """.format(
            path=get_base_path(),
            dbname=site_name[:16].replace('.', '_'),
            root_password=root_password,
            name=site_name))
    self.add_to_hosts(site_name)
    exec_in_shell("{path}/lib/wnf.py --build".format(path=get_base_path()))
    if is_parent:
        self.update_db_name_pwd()
    else:
        self.update_child_details(site_name)
def backup_to_dropbox():
    """Push the current database dump plus any new public files to Dropbox.

    Returns (did_not_upload, error_log) — failed file names and a deduplicated
    list of tracebacks. Reconnects to the database before returning.
    """
    from dropbox import client, session
    from conf import dropbox_access_key, dropbox_secret_key
    from webnotes.utils.backups import new_backup

    if not webnotes.conn:
        webnotes.connect()

    sess = session.DropboxSession(dropbox_access_key, dropbox_secret_key, "app_folder")
    sess.set_token(
        webnotes.conn.get_value("Backup Manager", None, "dropbox_access_key"),
        webnotes.conn.get_value("Backup Manager", None, "dropbox_access_secret"),
    )
    dropbox_client = client.DropboxClient(sess)

    # database dump
    backup = new_backup()
    dump_path = os.path.join(get_base_path(), "public", "backups",
        os.path.basename(backup.backup_path_db))
    upload_file_to_dropbox(dump_path, "/database", dropbox_client)
    webnotes.conn.close()

    # precompute (basename, size) pairs of everything already on Dropbox
    response = dropbox_client.metadata("/files")
    remote = set((os.path.basename(m["path"]), int(m["bytes"]))
        for m in response["contents"])

    did_not_upload = []
    error_log = []
    files_dir = os.path.join(get_base_path(), "public", "files")
    for fname in os.listdir(files_dir):
        fname = cstr(fname)
        if fname in ignore_list:
            continue
        fpath = os.path.join(files_dir, fname)
        if (os.path.basename(fpath), os.stat(fpath).st_size) in remote:
            continue  # identical name + size already mirrored
        try:
            upload_file_to_dropbox(fpath, "/files", dropbox_client)
        except Exception:
            did_not_upload.append(fname)
            error_log.append(webnotes.getTraceback())

    webnotes.connect()
    return did_not_upload, list(set(error_log))
def create_site():
    """Create every `Site Details` site whose flag is still 'False'."""
    from webnotes.model.code import get_obj
    webnotes.errprint('test')
    sites = webnotes.conn.sql(
        """select name from `tabSite Details` where flag = 'False' """, as_list=1)
    for site in sites:
        # primary site: the sites/ directory itself does not exist yet
        if not os.path.exists(os.path.join(get_base_path(), "sites")):
            get_obj('Site Details', site[0]).make_primary_sites_settings()
        # secondary site: sites/<name> is missing
        if not os.path.exists(os.path.join(get_base_path(), "sites", site[0])):
            get_obj('Site Details', site[0]).create_new_site()
def store_stock_reco_json():
    """Patch: parse each Stock Reconciliation's first attached CSV file and
    store it as JSON in the reconciliation_json column."""
    import os
    import json
    from webnotes.utils.datautils import read_csv_content
    from webnotes.utils import get_base_path

    files_path = os.path.join(get_base_path(), "public", "files")
    list_of_files = os.listdir(files_path)
    # hyphen-stripped variants, to survive an earlier file-rename patch
    replaced_list_of_files = [f.replace("-", "") for f in list_of_files]

    for reco, file_list in webnotes.conn.sql(
            """select name, file_list from `tabStock Reconciliation`"""):
        if not file_list:
            continue
        # first attachment, entry format "label,filename"
        stock_reco_file = file_list.split("\n")[0].split(",")[1]
        reco_path = os.path.join(files_path, stock_reco_file)
        if not os.path.exists(reco_path):
            if stock_reco_file in replaced_list_of_files:
                # map the de-hyphenated name back to the actual file on disk
                reco_path = os.path.join(files_path,
                    list_of_files[replaced_list_of_files.index(stock_reco_file)])
            else:
                reco_path = ""
        if not reco_path:
            continue
        with open(reco_path, "r") as open_reco_file:
            content = open_reco_file.read()
        try:
            content = read_csv_content(content)
            reconciliation_json = json.dumps(content, separators=(',', ': '))
            webnotes.conn.sql("""update `tabStock Reconciliation`
                set reconciliation_json=%s where name=%s""",
                (reconciliation_json, reco))
        except Exception:
            # not a valid CSV file — leave the record untouched
            pass
def setup_account(args=None):
    """One-time account setup: company, fiscal year, defaults, territories and
    seed records.

    `args` may be a dict or a JSON string; defaults to the request form_dict.
    Returns "okay" on success.
    """
    if not args:
        args = webnotes.local.form_dict
    if isinstance(args, basestring):
        args = json.loads(args)
    args = webnotes._dict(args)

    update_profile_name(args)
    create_fiscal_year_and_company(args)
    set_defaults(args)
    create_territories()
    create_feed_and_todo()
    import_core_docs()

    webnotes.conn.set_value('Control Panel', None, 'home_page', 'desktop')
    webnotes.clear_cache()
    webnotes.conn.commit()

    # suppress msgprints so the response stays clean
    webnotes.local.message_log = []

    exec_in_shell("""cp -r {path}/lib/public/datatable {path}/public/files """.format(
        path=get_base_path()))
    webnotes.conn.sql("CREATE TABLE ack(ENCOUNTER_ID varchar(20),ACK varchar(20))")
    # BUGFIX: was webnotes.conn.sql("commit()") — "commit()" is not a valid SQL
    # statement; sibling code issues the bare "commit".
    webnotes.conn.sql("commit")
    return "okay"
def make(site=None):
    """make public folder symlinks if missing"""
    from webnotes.utils import get_site_base_path, get_base_path, get_path

    webnotes.init(site=site)
    site_path = get_site_base_path() if site else get_base_path()

    # standard folders; conf keys override the defaults
    for conf_key, default in (("public_path", "public"),
            ("backup_path", "public/backups"),
            ("files_path", "public/files")):
        folder = os.path.join(site_path, webnotes.conf.get(conf_key, default))
        if not os.path.exists(folder):
            os.mkdir(folder)

    # js/css folders and app/lib symlinks apply only to the base install
    if not site:
        for sub in ("js", "css"):
            folder = get_path(webnotes.conf.get("public_path", "public"), sub)
            if not os.path.exists(folder):
                os.mkdir(folder)
        os.chdir(webnotes.conf.get("public_path", "public"))
        for link_name, target in (("app", "../app/public"), ("lib", "../lib/public")):
            if not os.path.exists(link_name) and os.path.exists(target):
                os.symlink(target, link_name)
        os.chdir("..")
def sync_db(self, patient_id=None):
    """Run the database sync; on the first-ever sync (no public/dbsync.txt yet)
    validate the tenant barcode first."""
    first_sync = not os.path.exists(
        os.path.join(get_base_path(), "public", 'dbsync.txt'))
    if first_sync:
        self.validate_tenant_barcode()
    # both paths end in the same sync call
    self.initiate_sync_db(patient_id)
def setup_account(args=None):
    """One-time setup: creates company, fiscal year, defaults, territories and
    seed records, then provisions the ack table.

    `args` may be a dict or a JSON string; falls back to the request form_dict.
    Returns "okay" when done.
    """
    if not args:
        args = webnotes.local.form_dict
    if isinstance(args, basestring):
        args = json.loads(args)
    args = webnotes._dict(args)

    update_profile_name(args)
    create_fiscal_year_and_company(args)
    set_defaults(args)
    create_territories()
    create_feed_and_todo()
    import_core_docs()

    webnotes.conn.set_value('Control Panel', None, 'home_page', 'desktop')
    webnotes.clear_cache()
    webnotes.conn.commit()

    # suppress queued msgprints so the response is clean
    webnotes.local.message_log = []

    exec_in_shell("""cp -r {path}/lib/public/datatable {path}/public/files """.format(
        path=get_base_path()))
    webnotes.conn.sql(
        "CREATE TABLE ack(ENCOUNTER_ID varchar(20),ACK varchar(20))")
    return "okay"
def generate_barcode(self):
    """Render a Code39 barcode image for this document's name under
    public/barcode_img/ and store the HTML snippet that displays it."""
    webnotes.errprint([self.doc.naming_series])
    s = self.doc.name
    import barcode
    from barcode.writer import ImageWriter
    ean = barcode.get('code39', s, writer=ImageWriter())
    path = os.path.join(get_base_path(), "public", "barcode_img") + "/" + s
    filename = ean.save(path)
    # NOTE(review): whitespace inside this continued literal was mangled in the
    # pasted source; reconstructed to match the sibling implementation's layout.
    barcode_img = '<html>\
        <table style="width: 100%; table-layout: fixed;">\
        <tr>\
        <td style="width:510px">\
        <img src="'"../barcode_img/" + s + ".png"'" width="200px">\
        </td>\
        </tr>\
        </table>\
        </html>'
    self.doc.barcode_image = barcode_img
    self.doc.save()
def get_item_for_list_in_html(context):
    """Scrub the item context and render the product-in-grid include for it."""
    from jinja2 import Environment, FileSystemLoader
    scrub_item_for_list(context)
    environment = Environment(loader=FileSystemLoader(get_base_path()))
    template = environment.get_template(
        "app/stock/doctype/item/templates/includes/product_in_grid.html")
    return template.render(context)
def clear_pyc_files():
    """Delete every .pyc file under the app base path, skipping locale dirs."""
    from webnotes.utils import get_base_path
    for path, folders, files in os.walk(get_base_path()):
        if 'locale' in folders:
            # prune in place so os.walk does not descend into locale/
            folders.remove('locale')
        for f in files:
            # BUGFIX: match the raw (byte-string) name directly; the original
            # f.decode("utf-8") raised UnicodeDecodeError on any non-UTF-8
            # filename and aborted the whole walk.
            if f.endswith(".pyc"):
                os.remove(os.path.join(path, f))
def get_uuid():
    """Return the value of the 'uuid' setting in public/files/hardware.xml,
    or None when no such setting exists."""
    import xml.etree.ElementTree as ET
    hardware_xml = '{path}/hardware.xml'.format(
        path=os.path.join(get_base_path(), "public", "files"))
    root = ET.parse(hardware_xml).getroot()
    for setting in root.iter('setting'):
        if setting.attrib['id'] == 'uuid':
            return setting.attrib['value']
def create_site():
    """Create all sites flagged 'False' in `tabSite Details` (primary first)."""
    from webnotes.model.code import get_obj
    webnotes.errprint('test')
    pending = webnotes.conn.sql(
        """select name from `tabSite Details` where flag = 'False' """,
        as_list=1)
    sites_dir = os.path.join(get_base_path(), "sites")
    for row in pending:
        site_name = row[0]
        # no sites/ directory yet => this is the primary site bootstrap
        if not os.path.exists(sites_dir):
            get_obj('Site Details', site_name).make_primary_sites_settings()
        # sites/<name> missing => secondary site creation
        if not os.path.exists(os.path.join(sites_dir, site_name)):
            get_obj('Site Details', site_name).create_new_site()
def _sub(match):
    """Regex-replace hook: inline the contents of a wn.require("...") target
    file when it exists under the base path; otherwise re-emit the call."""
    require_path = re.search('["\'][^"\']*["\']', match.group(0)).group(0)[1:-1]
    fpath = os.path.join(get_base_path(), require_path)
    if not os.path.exists(fpath):
        return 'wn.require("%s")' % require_path
    with open(fpath, 'r') as f:
        return '\n' + unicode(f.read(), "utf-8") + '\n'
def update_child_details(self, sub_tenant_url):
    """Copy db name/password from the sub-tenant's site_config.json into its
    `tabSub Tenant Details` record."""
    config_path = get_base_path() + '/sites/' + sub_tenant_url + '/site_config.json'
    with open(config_path, 'r') as site_config:
        lines = site_config.readlines()
    # positional parse: line 1 holds the db name, line 2 the password
    db_name = lines[1].split(':')[1].replace('"', '')[:-3]
    db_pwd = lines[2].split(':')[1].replace('"', '')[:-1]
    # NOTE(review): string-formatted SQL kept for behavior parity; values come
    # from a locally generated config file.
    webnotes.conn.sql(
        "update `tabSub Tenant Details` set db = LTRIM('%s'), pwd = LTRIM('%s') where sub_tenant_url = '%s' "
        % (db_name, db_pwd, sub_tenant_url), debug=1)
    webnotes.conn.sql("commit")
def get_lang_dict():
    """Load app/translations/languages.json; returns {} when the file is absent."""
    languages_path = os.path.join(get_base_path(), "app", "translations",
        "languages.json")
    if not os.path.exists(languages_path):
        return {}
    with open(languages_path, "r") as langfile:
        return json.loads(langfile.read())
def add_to_hosts(self):
    """Append this site's name to /etc/hosts by writing a temp copy and moving
    it into place via sudo."""
    webnotes.errprint("host")
    with open('/etc/hosts', 'rt') as f:
        contents = f.read() + '\n' + '127.0.0.1\t\t\t %s \n' % self.doc.site_name
    with open('hosts', 'wt') as outf:
        outf.write(contents)
    # NOTE(review): a hard-coded sudo password piped to `sudo -S` is a security
    # smell — preserved verbatim to keep behavior unchanged.
    os.system('echo gangadhar | sudo -S mv {path}/hosts /etc/hosts'.format(path=get_base_path()))
def execute():
    """Patch: remove the obsolete app/accounts/utils package directory."""
    from webnotes.utils import get_base_path
    import shutil
    import os
    utils_path = os.path.join(get_base_path(), "app", "accounts", "utils")
    if os.path.exists(utils_path):
        shutil.rmtree(utils_path)
def execute():
    """Patch: delete the retired item_price and price_list doctype folders."""
    import shutil
    from webnotes.utils import get_base_path
    for doctype_folder in ("item_price", "price_list"):
        path = os.path.join(get_base_path(), "app", "setup", "doctype", doctype_folder)
        if os.path.exists(path):
            shutil.rmtree(path)
def get_uuid():
    """Read public/files/hardware.xml and return the 'uuid' setting's value
    (None when the setting is missing)."""
    import xml.etree.ElementTree as ET
    files_dir = os.path.join(get_base_path(), "public", "files")
    tree = ET.parse('{path}/hardware.xml'.format(path=files_dir))
    for child in tree.getroot().iter('setting'):
        if child.attrib['id'] == 'uuid':
            return child.attrib['value']
def execute(): # find out when was the file list patch run res = webnotes.conn.sql("""select applied_on from `__PatchLog` where patch='patches.december_2012.file_list_rename' order by applied_on desc limit 1""") if res: patch_date = res[0][0].date() files_path = os.path.join(get_base_path(), "public", "files") change_map = {} file_data_list = webnotes.conn.sql("""select name, file_name from `tabFile Data` where date(modified) <= %s and ifnull(file_url, '')='' and name like "%%-%%" """, patch_date) # print patch_date # print file_data_list # print files_path for fid, file_name in file_data_list: if os.path.exists(os.path.join(files_path, fid)): new_fid, new_file_name = fid.replace("-", ""), file_name.replace("-", "") try: webnotes.conn.sql("""update `tabFile Data` set name=%s, file_name=%s where name=%s""", (new_fid, new_file_name, fid)) os.rename(os.path.join(files_path, fid), os.path.join(files_path, new_fid)) change_map[",".join((file_name, fid))] = ",".join((new_file_name, new_fid)) except Exception, e: # if duplicate entry, then dont update if e[0]!=1062: raise e changed_keys = change_map.keys() for dt in webnotes.conn.sql("""select distinct parent from tabDocField where fieldname='file_list'"""): try: data = webnotes.conn.sql("""select name, file_list from `tab%s` where ifnull(file_list, '')!=''""" % dt[0]) for name, file_list in data: new_file_list = [] file_list = file_list.split("\n") for f in file_list: if f in changed_keys: new_file_list.append(change_map[f]) else: new_file_list.append(f) if new_file_list != file_list: webnotes.conn.sql("""update `tab%s` set file_list=%s where name=%s""" % (dt[0], "%s", "%s"), ("\n".join(new_file_list), name)) except Exception, e: if e[0]!=1146: raise e
def make_primary_sites_settings(self):
    """Bootstrap the multi-site layout: create sites/, move public/ under this
    (primary) site, derive its site_config.json from conf.py, rebuild, and
    register the hostname."""
    exec_in_shell("""mkdir {path}/sites """.format(path=get_base_path()))
    with open(os.path.join(get_base_path(), "conf.py"), "a") as conf_file:
        conf_file.write('\nsites_dir = "%s"' % ("{path}/sites".format(path=get_base_path())))
    exec_in_shell(""" mkdir -p {path}/sites/{site_name}/ """.format(
        path=get_base_path(), site_name=self.doc.site_name))
    exec_in_shell(""" mv {path}/public {path}/sites/{site_name}/public """.format(
        path=get_base_path(), site_name=self.doc.site_name))
    with open("conf.py") as temp:
        lines = temp.readlines()
    # positional parse of conf.py: line 8 = db name, line 9 = db password
    db_name = lines[7][:-1].split('=')
    db_name = '"' + db_name[0] + '" :' + db_name[1].replace("'", '"')
    db_password = lines[8][:-1].split('=')
    # NOTE(review): the '******' prefix looks like a redaction artifact in the
    # original source (compare with db_name's '"' prefix); kept verbatim.
    db_password = '******' + db_password[0] + '" :' + db_password[1].replace("'", '"')
    with open(os.path.join(get_base_path(), "site_config.json"), "w") as conf_file:
        conf_file.write("{\n" + db_name + ",\n" + db_password + "\n}")
    exec_in_shell(""" mv {path}/site_config.json {path}/sites/{site_name}/ """.format(
        path=get_base_path(), site_name=self.doc.site_name))
    exec_in_shell(""" ./lib/wnf.py --build """)
    self.add_to_hosts()
def make_primary_sites_settings(self):
    """Bootstrap multi-site layout for this site: create sites/, relocate
    public/, generate site_config.json from conf.py, rebuild assets, and
    refresh the nginx configuration."""
    base = get_base_path()
    exec_in_shell("""mkdir {path}/sites """.format(path=base))
    with open(os.path.join(base, "conf.py"), "a") as conf_file:
        conf_file.write('\nsites_dir = "%s"' % ("{path}/sites".format(path=base)))
    exec_in_shell(""" mkdir -p {path}/sites/{site_name}/ """.format(
        path=base, site_name=self.doc.site_name))
    exec_in_shell(""" mv {path}/public {path}/sites/{site_name}/public """.format(
        path=base, site_name=self.doc.site_name))
    with open("conf.py") as temp:
        lines = temp.readlines()
    # lines 8/9 of conf.py carry the db name and password assignments
    db_name = lines[7][:-1].split('=')
    db_name = '"' + db_name[0] + '" :' + db_name[1].replace("'", '"')
    db_password = lines[8][:-1].split('=')
    # NOTE(review): '******' looks like a redaction artifact in the pasted
    # source; reproduced verbatim to avoid inventing behavior.
    db_password = '******' + db_password[0] + '" :' + db_password[1].replace(
        "'", '"')
    with open(os.path.join(base, "site_config.json"), "w") as conf_file:
        conf_file.write("{\n" + db_name + ",\n" + db_password + "\n}")
    exec_in_shell(""" mv {path}/site_config.json {path}/sites/{site_name}/ """.format(
        path=base, site_name=self.doc.site_name))
    exec_in_shell(""" ./lib/wnf.py --build """)
    self.update_nginx_conf()
def get_local_settings(self, table):
    """Build the local-database connection/file settings dict used when
    syncing `table`."""
    settings = {
        'dbuser': self.doc.dbuser,
        'dbuserpassword': self.doc.dbuserpassword,
        'dbname': self.doc.dbname,
        'file_path': os.path.join(get_base_path(), "public", "files"),
        # table name with spaces flattened, used as the dump file name
        'file_name': table.replace(' ', '_'),
        'tab': table,
    }
    return settings
def backup_to_gdrive():
    """Upload the latest database backup and any new public files to Google Drive.

    Files already on Drive with the same title and size are skipped.
    """
    from webnotes.utils.backups import new_backup
    found_database = False  # kept for parity with the original; not used below
    if not webnotes.conn:
        webnotes.connect()
    flow = get_gdrive_flow()
    credentials_json = webnotes.conn.get_value("Backup Manager", None, "gdrive_credentials")
    credentials = oauth2client.client.Credentials.new_from_json(credentials_json)
    http = httplib2.Http()
    http = credentials.authorize(http)
    drive_service = build('drive', 'v2', http=http)

    # upload database dump
    backup = new_backup()
    path = os.path.join(get_base_path(), "public", "backups")
    filename = os.path.join(path, os.path.basename(backup.backup_path_db))
    upload_files(filename, 'application/x-gzip', drive_service,
        webnotes.conn.get_value("Backup Manager", None, "database_folder_id"))

    # upload new public files
    files_folder_id = webnotes.conn.get_value("Backup Manager", None, "files_folder_id")
    path = os.path.join(get_base_path(), "public", "files")

    # BUGFIX: fetch the remote folder listing once instead of re-listing it for
    # every local file (O(local * remote) API calls otherwise).
    param = {}
    children = drive_service.children().list(
        folderId=files_folder_id, **param).execute()
    remote_files = [drive_service.files().get(fileId=child['id']).execute()
        for child in children.get('items', [])]

    for files in os.listdir(path):
        # BUGFIX: the flag must reset per file — previously it was set once
        # before the loop, so after the first match every later file looked
        # "found" and was silently skipped.
        found_files = False
        filename = path + "/" + files
        ext = filename.split('.')[-1]
        size = os.path.getsize(filename)
        if ext == 'gz' or ext == 'gzip':
            mimetype = 'application/x-gzip'
        else:
            # BUGFIX: unknown extensions raised KeyError on types_map[...];
            # fall back to a generic type like the sibling backup variants do.
            mimetype = mimetypes.types_map.get("." + ext) or "application/octet-stream"
        # compare local file with server file
        for file in remote_files:
            if files == file['title'] and size == int(file['fileSize']):
                found_files = True
                break
        if not found_files:
            upload_files(filename, mimetype, drive_service, files_folder_id)
def add_to_hosts(self, site_name):
    """Append `site_name` to /etc/hosts via a temp file moved into place with sudo."""
    webnotes.errprint("host")
    with open('/etc/hosts', 'rt') as f:
        contents = f.read() + '\n' + '127.0.0.1\t\t\t %s \n' % site_name
    with open('hosts', 'wt') as outf:
        outf.write(contents)
    # NOTE(review): hard-coded sudo password piped to `sudo -S` — a security
    # smell, preserved verbatim to keep behavior unchanged.
    os.system(
        'echo MedSynaptic | sudo -S mv {path}/hosts /etc/hosts'.format(
            path=get_base_path()))
def update_this_app():
    """Pull the latest app code via `wnf.py --update`; refused on hosted
    (expiring) instances. Returns the combined shell output."""
    import conf
    if hasattr(conf, "expires_on"):
        # hosted instances are updated centrally
        return _("This feature is only applicable to self hosted instances")
    from webnotes.utils import execute_in_shell, cstr, get_base_path
    err, out = execute_in_shell(
        "cd %s && exec ssh-agent lib/wnf.py --update origin master" % (get_base_path(),))
    return "\n".join(filter(None, [cstr(err), cstr(out)]))
def get_remote_settings(self, table, cond=None, patient_id=None):
    """Build the remote-database settings dict for syncing `table`, optionally
    restricted to a single patient via a mysqldump --where clause."""
    if cond:
        if table == 'tabPatient Register':
            cond = """--where="name='%s'" """ % patient_id
        elif table == 'tabPatient Encounter Entry':
            cond = """--where="patient='%s'" """ % patient_id
    settings = {
        'host_id': self.doc.host_id,
        'host_ssh_user': self.doc.host_ssh_user,
        'host_ssh_password': self.doc.host_ssh_password,
        'remote_dbuser': self.doc.remote_dbuser,
        'remote_dbuserpassword': self.doc.remote_dbuserpassword,
        'remote_dbname': self.doc.remote_dbname,
        'file_path': os.path.join(get_base_path(), "public", "files"),
        'parameter': '%',
        'file_name': table.replace(' ', '_'),
        'tab': table,
        'cond': cond if cond else '',
    }
    return settings
def initiate_tenant_ctreation(self, site_name_root_password=None, *args, **kwargs):
    """(placeholder docstring replaced below)"""
def get_module_path(module):
    """Returns path of the given module"""
    from webnotes.utils import get_base_path
    m = scrub(module)
    # core and website live under lib/, everything else under app/
    subdir = 'lib' if m in ('core', 'website') else 'app'
    return os.path.join(get_base_path(), subdir, m)
def get_jenv():
    """Build the Jinja2 environment rooted at the app base path with the
    global_date_format, markdown and json filters registered."""
    from jinja2 import Environment, FileSystemLoader
    from webnotes.utils import get_base_path, global_date_format
    from markdown2 import markdown
    from json import dumps

    env = Environment(loader=FileSystemLoader(get_base_path()))
    env.filters.update({
        "global_date_format": global_date_format,
        "markdown": markdown,
        "json": dumps,
    })
    return env
def get_jenv():
    """Return a Jinja2 environment loading templates from the app base path,
    with date-format, markdown and json filters installed."""
    from jinja2 import Environment, FileSystemLoader
    from webnotes.utils import get_base_path, global_date_format
    from markdown2 import markdown
    from json import dumps

    environment = Environment(loader=FileSystemLoader(get_base_path()))
    for filter_name, fn in (("global_date_format", global_date_format),
            ("markdown", markdown), ("json", dumps)):
        environment.filters[filter_name] = fn
    return environment
def get_geo_ip_country(ip_addr):
    """Resolve `ip_addr` to a country name via the bundled GeoIP database.

    Returns None when pygeoip is not installed (lookup silently unavailable).
    """
    try:
        import pygeoip
    except ImportError:
        return
    import os
    from webnotes.utils import get_base_path
    database_file = os.path.join(get_base_path(), "lib", "data", "GeoIP.dat")
    reader = pygeoip.GeoIP(database_file, pygeoip.MEMORY_CACHE)
    return reader.country_name_by_addr(ip_addr)
def update_db_name_pwd(self):
    """Extract db name and password from this site's site_config.json and
    persist them on its `tabSite Details` record."""
    config_path = (get_base_path() + '/sites/' + self.doc.site_name
        + '/site_config.json')
    with open(config_path, 'r') as site_config:
        lines = site_config.readlines()
    # positional parse: line 1 = db name, line 2 = password
    db_name = lines[1].split(':')[1].replace('"', '')[:-3]
    db_pwd = lines[2].split(':')[1].replace('"', '')[:-1]
    webnotes.conn.sql(
        "update `tabSite Details` set database_name = LTRIM('%s'), database_password = LTRIM('%s') where name = '%s' "
        % (db_name, db_pwd, self.doc.name), debug=1)
    webnotes.conn.sql("commit")
def update_child_details(self, sub_tenant_url):
    """Read the sub-tenant's db credentials from its site_config.json and
    write them into `tabSub Tenant Details`."""
    config_path = (get_base_path() + '/sites/' + sub_tenant_url
        + '/site_config.json')
    with open(config_path, 'r') as site_config:
        lines = site_config.readlines()
    # line 1 carries the db name, line 2 the password
    db_name = lines[1].split(':')[1].replace('"', '')[:-3]
    db_pwd = lines[2].split(':')[1].replace('"', '')[:-1]
    webnotes.conn.sql(
        "update `tabSub Tenant Details` set db = LTRIM('%s'), pwd = LTRIM('%s') where sub_tenant_url = '%s' "
        % (db_name, db_pwd, sub_tenant_url), debug=1)
    webnotes.conn.sql("commit")
def execute():
    """Patch: delete the legacy ledger/ageing/level pages and their code folders."""
    import webnotes, os, shutil
    from webnotes.utils import get_base_path
    for page in ('stock-ledger', 'stock-ageing', 'stock-level', 'general-ledger'):
        webnotes.delete_doc('Page', page)
    for module, folder in [["stock", "stock_ledger"], ["stock", "stock_ageing"],
            ["stock", "stock_level"], ["accounts", "general_ledger"]]:
        path = os.path.join(get_base_path(), "app", module, "page", folder)
        if os.path.exists(path):
            shutil.rmtree(path)
def set_sync_date(self):
    """Write today's date — encrypted with a fresh salt — to public/dbsync.txt
    and mirror it into Global Defaults' last_sync_date."""
    sync_file = os.path.join(get_base_path(), "public") + '/' + "dbsync.txt"
    salt = self.get_salt()
    digest = self.encrypt(salt, cstr(today()))
    # file format: "<digest>,<salt>"
    with open(sync_file, "w+") as out:
        out.write(digest)
        out.write(",")
        out.write(salt)
    webnotes.conn.sql("update tabSingles set value = '%s' where doctype = 'Global Defaults' and field = 'last_sync_date'" % (today()))
    webnotes.conn.sql("commit")
def set_sync_date(self):
    """Record today's sync date, salted and encrypted, in public/dbsync.txt,
    then update last_sync_date in Global Defaults."""
    public_dir = os.path.join(get_base_path(), "public")
    sync_path = public_dir + '/' + "dbsync.txt"
    salt = self.get_salt()
    digest = self.encrypt(salt, cstr(today()))
    # stored as "<digest>,<salt>" so last_sync() can verify integrity
    with open(sync_path, "w+") as handle:
        handle.write(digest)
        handle.write(",")
        handle.write(salt)
    webnotes.conn.sql(
        "update tabSingles set value = '%s' where doctype = 'Global Defaults' and field = 'last_sync_date'"
        % (today()))
    webnotes.conn.sql("commit")
def generate_barcode(self):
    """Generate a Code39 barcode for this patient's name, save its image under
    public/barcode_img/, and store the display HTML on the document."""
    webnotes.errprint([self.doc.naming_series])
    self.doc.patient_online_id = self.doc.name
    from barcode.writer import ImageWriter
    ean = barcode.get('code39', self.doc.patient_online_id, writer=ImageWriter())
    path = os.path.join(get_base_path(), "public", "barcode_img") + "/" + self.doc.name
    fullname = ean.save(path)
    # NOTE(review): internal whitespace of this continued literal was mangled
    # in the pasted source; reconstructed to mirror the sibling variant.
    barcode_img = '<html>\
        <table style="width: 100%; table-layout: fixed;">\
        <tr>\
        <td style="width:510px">\
        <img src="'"/barcode_img/" + self.doc.name + ".png"'" width="200px">\
        </td>\
        </tr>\
        </table>\
        </html>'
    self.doc.barcode_image = barcode_img
def last_sync(self): from webnotes.utils import today, date_diff, cint import os.path last_sync_date = '' if cstr( webnotes.conn.get_value('Global Defaults', None, 'db_sync_flag')) == 'Yes': file_path = os.path.join(get_base_path(), "public") f2 = file_path + '/' + "dbsync.txt" if os.path.exists(f2): for line in open(f2, "r"): msg, key = line.split(",") decrypt = self.decrypt(key, msg) try: last_sync_date = getdate(decrypt) except Exception, e: self.fail( 'There are some manual interpretation with system file.Please Sync to continue' )
def get_remote_settings(self, table, cond=None, patient_id=None):
    """Assemble the remote-database sync settings for `table`; when `cond` is
    truthy, narrow the dump to one patient via a mysqldump --where clause."""
    if cond:
        if table == 'tabPatient Register':
            cond = """--where="name='%s'" """ % patient_id
        elif table == 'tabPatient Encounter Entry':
            cond = """--where="patient='%s'" """ % patient_id
    doc = self.doc
    return {
        'host_id': doc.host_id,
        'host_ssh_user': doc.host_ssh_user,
        'host_ssh_password': doc.host_ssh_password,
        'remote_dbuser': doc.remote_dbuser,
        'remote_dbuserpassword': doc.remote_dbuserpassword,
        'remote_dbname': doc.remote_dbname,
        'file_path': os.path.join(get_base_path(), "public", "files"),
        'parameter': '%',
        'file_name': table.replace(' ', '_'),
        'tab': table,
        'cond': cond if cond else '',
    }