def get_payment_url(self, **kwargs):
    """Initiate a Paynow transaction and return the browser URL to redirect the payer to.

    Expected kwargs: ``reference_docname``, ``amount``, ``description``,
    ``payer_email``. Returns the ``browserurl`` from Paynow's response, or
    ``None`` implicitly if the response hash check fails.
    """
    # NOTE(review): callbacks use plain http:// — confirm whether Paynow
    # requires/allows TLS here.
    params = {
        # API endpoint Paynow calls back when the payment status changes
        "returnurl": "http://{}/{}/{}".format(
            get_site_base_path()[2:],
            "api/method/paynow_gateway.paynow_gateway.doctype.paynow_settings.paynow_settings.check_if_paid?",
            kwargs.get("reference_docname")),
        # page the payer is redirected to after paying; original author noted
        # this should become dynamic (success vs failure)
        "resulturl": "http://{}/{}".format(get_site_base_path()[2:], "orders"),
        "reference": kwargs.get("reference_docname"),  # taken from the transaction
        "amount": kwargs.get("amount"),
        "id": self.paynow_integration_id,
        "additionalinfo": kwargs.get("description"),
        "authemail": kwargs.get("payer_email"),
        "status": "Message"
    }
    # Concatenate + hash the params, POST them to Paynow, read the body.
    query_string = self.paynow_create_url_query(params, self.paynow_integration_key)
    paynow_request = Request(self.paynow_init_url)
    result = urlopen(paynow_request, query_string)
    result = result.read().decode('utf-8')
    # Verify the response hash before trusting any of its contents.
    if self.check_initiate_response(result, self.paynow_integration_key):
        response = {}
        for part in result.split('&'):
            # Split on the FIRST '=' only: the old split("=") truncated values
            # containing a literal '=' and crashed on parts with none.
            key, _, value = part.partition("=")
            response[key] = unquote(value)
        # NOTE(review): these globals are only visible within this process;
        # the original comment warns they are lost if another instance runs.
        global pollurl
        global urlhash
        pollurl = response['pollurl']
        urlhash = response['hash']
        integration_request = create_request_log(kwargs, "Host", "Paynow")
        return response['browserurl']
def get_path_assets_js():
    """Return the absolute path to the compiled JS assets directory.

    Prefers the ``sites_path`` from the fluorine configuration; falls back to
    the current site's base path, which sits one level below the bench-wide
    ``assets`` directory (hence the extra ``..`` in that branch).
    """
    base = get_fluorine_conf("sites_path")
    if not base:
        base = get_site_base_path()
        js_path = os.path.realpath(os.path.join(base, "..", "assets", "js"))
    else:
        js_path = os.path.realpath(os.path.join(base, "assets", "js"))
    # Parenthesized so this print works on Python 3 as well as Python 2.
    print("js_path {}".format(js_path))
    return js_path
def backup_to_service():
    """Run the configured backups and optionally sync each to cloud storage.

    Settings are read from the 'Backup Manager' single doctype. Returns a
    tuple ``(did_not_upload, error_log)`` collected by ``sync_folder``.
    """
    # NOTE(review): a near-identical copy of this function appears later in
    # this file — consider de-duplicating.
    from frappe.utils.backups import new_backup
    from frappe.utils import get_files_path
    # upload files to files folder
    did_not_upload = []
    error_log = []
    if not frappe.db:
        frappe.connect()
    # retention window (hours) and whether to push backups to the cloud
    older_than_hrs = cint(
        frappe.db.get_value('Backup Manager', None, 'older_than'))
    cloud_sync = cint(frappe.db.get_value('Backup Manager', None, 'cloud_sync'))
    # site = cstr(frappe.local.site)
    # [2:] strips the leading "./" from the site base path
    site = get_site_base_path()[2:]
    if cint(frappe.db.get_value("Backup Manager", None, "enable_database")):
        # upload database
        backup = new_backup(older_than_hrs, ignore_files=True)
        # filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
        if cloud_sync:
            sync_folder(site, older_than_hrs, get_backups_path(), "database",
                        did_not_upload, error_log)
    # file backups are staged one level above the database backups folder
    BASE_DIR = os.path.join(get_backups_path(), '../file_backups')
    if cint(frappe.db.get_value("Backup Manager", None, "enable_files")):
        Backup_DIR = os.path.join(BASE_DIR, "files")
        compress_files(get_files_path(), Backup_DIR)
        if cloud_sync:
            sync_folder(site, older_than_hrs, Backup_DIR, "public-files",
                        did_not_upload, error_log)
    if cint(frappe.db.get_value("Backup Manager", None, "enable_private_files")):
        Backup_DIR = os.path.join(BASE_DIR, "private/files")
        compress_files(get_files_path(is_private=1), Backup_DIR, "private")
        if cloud_sync:
            sync_folder(site, older_than_hrs, Backup_DIR, "private-files",
                        did_not_upload, error_log)
    frappe.db.close()
    # frappe.connect()
    return did_not_upload, list(set(error_log))
def upload():
    """Import yesterday's PixelPOS export files as Journal Entries.

    Raises frappe.PermissionError when the session user may not create
    Journal Entries.
    """
    # Fail fast before doing any work.
    if not frappe.has_permission("Journal Entry", "create"):
        raise frappe.PermissionError

    data_dir = os.path.join(get_site_base_path(), "public/pixelposdata/")
    stamp = (datetime.now() - timedelta(days=1)).strftime("%Y%m%d")

    # (file prefix, Journal Entry description) pairs the POS exports nightly.
    exports = (
        ("POSDATA", "POS Summary"),
        ("COGSDATA", "Cost of Goods Sold"),
        ("PURDATA", "Purchase data"),
    )
    for prefix, description in exports:
        filename = "{}{}.csv".format(prefix, stamp)
        upload_file(os.path.join(data_dir, filename), description, stamp, filename)
def backup_to_service():
    """Create the configured backups and optionally sync each one to the cloud.

    All switches live on the 'Backup Manager' single doctype. Returns a tuple
    ``(did_not_upload, error_log)`` accumulated by ``sync_folder``.
    """
    from frappe.utils.backups import new_backup
    from frappe.utils import get_files_path

    failed_uploads = []
    errors = []
    if not frappe.db:
        frappe.connect()

    def setting(field):
        # Every knob is a field on the 'Backup Manager' single doctype.
        return cint(frappe.db.get_value("Backup Manager", None, field))

    keep_hours = setting('older_than')
    sync_enabled = setting('cloud_sync')
    site = get_site_base_path()[2:]  # strip the leading "./"

    if setting("enable_database"):
        new_backup(keep_hours, ignore_files=True)
        if sync_enabled:
            sync_folder(site, keep_hours, get_backups_path(), "database",
                        failed_uploads, errors)

    # File backups are staged next to (one level above) the DB backups folder.
    file_backup_root = os.path.join(get_backups_path(), '../file_backups')

    if setting("enable_files"):
        target = os.path.join(file_backup_root, "files")
        compress_files(get_files_path(), target)
        if sync_enabled:
            sync_folder(site, keep_hours, target, "public-files",
                        failed_uploads, errors)

    if setting("enable_private_files"):
        target = os.path.join(file_backup_root, "private/files")
        compress_files(get_files_path(is_private=1), target, "private")
        if sync_enabled:
            sync_folder(site, keep_hours, target, "private-files",
                        failed_uploads, errors)

    frappe.db.close()
    return failed_uploads, list(set(errors))
def __init__(self, site_path=None):
    """Set up the auto-assigner's working folders and its three components.

    Args:
        site_path: root folder for assigner data (must end with "/");
            defaults to the current site's base path with the leading
            "./" stripped.
    """
    print("Init AutoAssigner")
    if site_path is None:  # 'is None' — comparing with '== None' is non-idiomatic
        site_path = get_site_base_path()[2:] + "/"
    scraper_folder = site_path + "assigner/scrapes/"
    combiner_folder = site_path + "assigner/data/"
    learner_folder = site_path + "assigner/model/"
    # Ensure every working folder exists BEFORE handing it to a component,
    # in case a component reads from its folder during construction.
    for folder in (scraper_folder, combiner_folder, learner_folder):
        if not os.path.exists(folder):
            os.makedirs(folder)
    self.s = Scraper(scraper_folder)
    self.c = Combiner(combiner_folder)
    self.l = Learner(learner_folder)
    print("Initialized")
def get_snapshots_config_path():
    """Return the full path of this site's ``snapshots.json`` config file."""
    site_root = get_site_base_path()
    return os.path.join(site_root, "snapshots.json")