def mark_job_sent(job, cost, db_session=None):
    """Finalize a successfully-sent job: stamp its dates, purge its
    stored data, notify the customer, fire the callback and, when
    payments are enabled, debit the account. Always returns True."""
    from library.mailer import email_success
    db_session = db_session or session
    job.mod_date = datetime.now()
    job.end_date = datetime.now()
    job.status = 'sent'
    db_session.commit()
    # Customer data is removed as soon as the fax has gone out.
    o('Deleting data lol.')
    job.delete_data(db_session)
    if job.account.email_success:
        email_success(job)
    if job.callback_url:
        send_job_callback(job, db_session)
    # Billing only happens on deployments that require payments.
    if os.environ.get('REQUIRE_PAYMENTS') == 'on':
        o('Debiting $%s on account ID %s' % (cost, job.account.id))
        commit_transaction(job, cost, 'job_complete')
    return True
def fail(ref, job, db, debug=None, db_session=None):
    """Record a job failure, email the customer if subscribed, and fire
    the job callback. Returns the literal string "FAILED"."""
    from library.mailer import email_fail
    from rq import Worker
    if not db_session:
        db_session = session
    # The worker may have no modem configured; fall back to "".
    device = getattr(Worker, 'MODEM_DEVICE', "")
    o('JOB FAILED: %s (%s)' % (ref, debug))
    error = worker_error(ref)
    job.failed = 1
    job.fail_code = error["code"]
    job.fail_date = datetime.now()
    job.mod_date = datetime.now()
    job.status = 'failed'
    if debug == None:
        job.debug = error["msg"]
    else:
        job.debug = debug
    db_session.commit()
    if job.account.email_fail:
        email_fail(job, error["msg"], error["code"], error["status"],
                   {"device": device, "output": debug})
    if job.callback_url:
        send_job_callback(job, db_session)
    return "FAILED"
def convert_to_tiff(access_key, filename, flatten=False):
    """Render ./tmp/<access_key>/<filename> into per-page G3 fax TIFFs
    (<prefix>.00.tiff, <prefix>.01.tiff, ...) via ImageMagick."""
    from subprocess import check_output
    o('Convert %s to .tiff' % filename)
    file_prefix, file_extension = os.path.splitext(filename)
    source = "./tmp/" + access_key + "/" + filename
    target = "fax:./tmp/" + access_key + "/" + file_prefix + ".%02d.tiff"
    # NOTE(review): the trailing spaces in "400 " / "1760x2200 " are kept
    # verbatim from the original command; ImageMagick appears to tolerate
    # them — confirm before "fixing".
    command = ["convert", "-density", "400 ", "-resize", "1760x2200 "]
    if flatten:
        # Flatten transparency onto white — presumably for PDF input.
        command += ["-background", "White", "-alpha", "Background"]
    command += [source, target]
    output = check_output(command)
def fail(ref, job, db, debug=None):
    """Record a job failure on the global session, email the customer if
    subscribed, and fire the job callback. Returns "FAILED".

    Legacy variant: unlike the db_session-aware fail(), this one always
    commits the module-level `session`.
    """
    from library.mailer import email_fail
    from rq import Worker
    # BUG FIX: a worker without MODEM_DEVICE configured used to raise
    # AttributeError here, crashing the failure handler itself; mirror
    # the guarded variant of fail() and fall back to "".
    try:
        device = Worker.MODEM_DEVICE
    except AttributeError:
        device = ""
    o('JOB FAILED: %s (%s)' % (ref, debug))
    error = worker_error(ref)
    job.failed = 1
    job.fail_code = error["code"]
    job.fail_date = datetime.now()
    job.mod_date = datetime.now()
    job.status = 'failed'
    job.debug = error["msg"] if debug is None else debug
    session.commit()
    if job.account.email_fail:
        email_fail(job, error["msg"], error["code"], error["status"],
                   {"device": device, "output": debug})
    if job.callback_url:
        send_job_callback(job, db)
    return "FAILED"
def save_local_file(access_key, filename, data):
    """Write *data* (bytes) to ./tmp/<access_key>/<filename>.

    SECURITY NOTE(review): *filename* is interpolated into the path
    unsanitized, so a name like '../../x' escapes ./tmp; the
    secure_filename() variant of this helper addresses that. Flagged
    rather than changed here to keep callers' filenames intact.
    """
    o('Saving local file: %s' % filename)
    # Context manager guarantees the handle is closed even if write fails.
    with open('./tmp/' + access_key + '/' + filename, 'wb') as f:
        f.write(data)
def mark_job_sent(job, cost, db_session=None):
    """Mark *job* as delivered and run all post-send bookkeeping
    (data purge, success email, callback, optional billing)."""
    from library.mailer import email_success
    if not db_session:
        db_session = session
    job.mod_date = datetime.now()
    job.end_date = datetime.now()
    job.status = 'sent'
    db_session.commit()
    o('Deleting data lol.')
    job.delete_data(db_session)
    account = job.account
    if account.email_success:
        email_success(job)
    if job.callback_url:
        send_job_callback(job, db_session)
    if os.environ.get('REQUIRE_PAYMENTS') == 'on':
        o('Debiting $%s on account ID %s' % (cost, account.id))
        commit_transaction(job, cost, 'job_complete')
    return True
def email_admin(message, subject=None):
    """Send an admin alert email via the SparkPost transmissions API.

    Returns False (without sending) when SPARKPOST_API_KEY is unset;
    otherwise posts to SparkPost and logs, never raises.
    """
    if not os.environ.get('SPARKPOST_API_KEY'):
        return False
    import requests
    url = 'https://api.sparkpost.com/api/v1/transmissions'
    payload = {
        "content": {
            "subject": subject if subject else "%s CRITICAL ERROR" % project,
            "html": message,
            "from": {
                "name": os.environ.get('EMAIL_FROM_NAME'),
                "email": os.environ.get('EMAIL_FROM')
            }
        },
        "recipients": [{'address': os.environ.get('EMAIL_FROM')}]
    }
    try:
        response = requests.post(url, headers=email_headers, json=payload)
    except Exception as e:
        # BUG FIX: the old handler logged response.text, but `response`
        # is unbound when requests.post() itself raises — that was a
        # NameError, not a log line. Log the exception instead.
        o('SparkPost API Fail: %s' % e)
def delete_data(self, session=None):
    """
    Removes job data from the server (including optionally Amazon S3).

    Blanks every customer-supplied cover/body field, commits, removes
    the job's temp directory, and — when AWS_STORAGE is "on" — deletes
    the per-page TIFFs from S3. Returns True on the normal path and
    None if the S3 connection cannot be established.
    """
    import shutil
    import os
    from boto.s3.connection import S3Connection
    from boto.s3.key import Key
    if not session:
        session = db.session
    self.data_deleted = 1
    # Scrub all PII before anything else so the commit below removes the
    # sensitive data even if the file cleanup afterwards fails.
    self.cover_name = None
    self.cover_address = None
    self.cover_city = None
    self.cover_state = None
    self.cover_zip = None
    self.cover_country = None
    self.cover_phone = None
    self.cover_email = None
    self.cover_company = None
    self.cover_to_name = None
    self.cover_cc = None
    self.cover_subject = None
    self.cover_status = None
    self.cover_comments = None
    self.body = None
    self.mod_date = datetime.now()
    session.commit()
    if os.path.isdir('./tmp/' + self.access_key):
        shutil.rmtree('./tmp/' + self.access_key)
    if os.environ.get('AWS_STORAGE') == "on":
        try:
            conn = S3Connection(os.environ.get('AWS_ACCESS_KEY'),
                                os.environ.get('AWS_SECRET_KEY'))
            bucket = conn.get_bucket(os.environ.get('AWS_S3_BUCKET'))
        except Exception:
            o("COULD NOT CONNECT TO S3 WTF WTF WTF WTF")
            return
        try:
            # Pages live at fax/<access_key>/<filename>.NN.tiff, NN
            # zero-padded to two digits.
            for i in range(0, self.num_pages):
                n = ("0%s" % i) if i < 10 else "%s" % i
                k = Key(bucket)
                k.key = 'fax/%s/%s.%s.tiff' % (self.access_key,
                                               self.filename, n)
                k.delete()
        except Exception:
            # BUG FIX: this branch deletes from S3, not local disk —
            # the old message wrongly said "FROM LOCAL" (cf. the sibling
            # variant of this method).
            o("COULD NOT DELETE FILES FROM S3 OMG SHIT")
    return True
def delete_data(self, session=None):
    """
    Removes job data from the server (including optionally Amazon S3).

    Blanks every customer-supplied cover/body field, commits, removes
    the job's temp directory, and — when AWS_STORAGE is "on" — deletes
    the per-page TIFFs from S3. Returns True on the normal path and
    None (early) if the S3 connection cannot be established.
    """
    import shutil
    import os
    from boto.s3.connection import S3Connection
    from boto.s3.key import Key
    if not session:
        session = db.session
    self.data_deleted = 1
    # Scrub all PII before anything else so the commit below removes the
    # sensitive data even if the file cleanup afterwards fails.
    self.cover_name = None
    self.cover_address = None
    self.cover_city = None
    self.cover_state = None
    self.cover_zip = None
    self.cover_country = None
    self.cover_phone = None
    self.cover_email = None
    self.cover_company = None
    self.cover_to_name = None
    self.cover_cc = None
    self.cover_subject = None
    self.cover_status = None
    self.cover_comments = None
    self.body = None
    self.mod_date = datetime.now()
    session.commit()
    # Remove the on-disk working directory for this job, if present.
    if os.path.isdir('./tmp/' + self.access_key):
        shutil.rmtree('./tmp/' + self.access_key)
    if os.environ.get('AWS_STORAGE') == "on":
        try:
            conn = S3Connection(os.environ.get('AWS_ACCESS_KEY'),
                                os.environ.get('AWS_SECRET_KEY'))
            bucket = conn.get_bucket(os.environ.get('AWS_S3_BUCKET'))
        except:
            o("COULD NOT CONNECT TO S3 WTF WTF WTF WTF")
            return
        try:
            # Pages live at fax/<access_key>/<filename>.NN.tiff, NN
            # zero-padded to two digits.
            for i in range(0, self.num_pages):
                n = ("0%s" % i) if i < 10 else "%s" % i
                k = Key(bucket)
                k.key = 'fax/%s/%s.%s.tiff' % (self.access_key,
                                               self.filename, n)
                k.delete()
        except:
            o("COULD NOT DELETE FILES FROM S3 OMG SHIT")
    return True
def save_local_file(access_key, filename, data):
    """Sanitize *filename* with werkzeug's secure_filename and write
    *data* under ./tmp/<access_key>/, returning the safe name used."""
    from werkzeug.utils import secure_filename
    o('Saving local file: %s' % filename)
    # Neutralize path-traversal characters before touching the disk.
    safe_filename = secure_filename(filename)
    target = u'./tmp/' + access_key + u'/' + safe_filename
    with open(target, 'wb') as handle:
        handle.write(data)
    return safe_filename
def send_job_callback(job, db):
    """
    POST the job's public data to job.callback_url (5 second timeout),
    incrementing job.callback_fail when the request errors out.
    """
    import requests
    import traceback
    job.callback_date = datetime.now()
    try:
        r = requests.post(job.callback_url, data=job.public_data(),
                          timeout=5)
        r.raise_for_status()
        o("Sent callback: %s; (HTTP 200)" % job.callback_url)
    except:
        o("CALLBACK FAIL: %s / %s" % (job.callback_url,
                                      traceback.format_exc()))
        job.callback_fail = job.callback_fail + 1
    # NOTE(review): this commits the global `session` and ignores the
    # `db` parameter it was handed; the db_session variant of this
    # helper commits the passed handle instead. Verify what callers
    # actually pass for `db` before unifying the two.
    session.commit()
def send_job_callback(job, db_session):
    """POST the job's public data to its callback URL with a 5 second
    timeout; failures bump job.callback_fail. Commits *db_session*."""
    import requests
    import traceback
    job.callback_date = datetime.now()
    try:
        reply = requests.post(job.callback_url, data=job.public_data(),
                              timeout=5)
        reply.raise_for_status()
        o("Sent callback: %s; (HTTP 200)" % job.callback_url)
    except:
        o("CALLBACK FAIL: %s / %s" % (job.callback_url,
                                      traceback.format_exc()))
        job.callback_fail += 1
    db_session.commit()
def send_email(message, account=None, attach_name=None, attach_file=None,
               attach_mime=None):
    """Send *message* through the SparkPost transmissions API, optionally
    addressed to *account* and carrying one base64-encoded attachment.

    Returns False (without sending) when SPARKPOST_API_KEY is unset;
    otherwise posts to SparkPost and logs failures, never raises.
    """
    if not os.environ.get('SPARKPOST_API_KEY'):
        return False
    import requests
    import base64
    url = 'https://api.sparkpost.com/api/v1/transmissions'
    payload = {}
    payload['content'] = message
    payload['content']['from'] = {
        "name": os.environ.get('EMAIL_FROM_NAME'),
        "email": os.environ.get('EMAIL_FROM')
    }
    if account:
        if account.first_name and account.last_name:
            payload['recipients'] = [{
                'address': {
                    'email': account.email,
                    'name': account.first_name + ' ' + account.last_name
                }
            }]
        else:
            payload['recipients'] = [{'address': account.email}]
    if attach_name and attach_file and attach_mime:
        # BUG FIX: attachments must be read in binary mode ('rb') before
        # base64-encoding — text mode corrupts binary files on some
        # platforms. Also use a context manager so the handle closes.
        with open(attach_file, 'rb') as f:
            attach_data = base64.b64encode(f.read())
        payload['content']['attachments'] = [{
            "type": attach_mime,
            "name": attach_name,
            "data": attach_data
        }]
    try:
        response = requests.post(url, headers=email_headers, json=payload)
    except Exception as e:
        # BUG FIX: `response` is unbound when requests.post() itself
        # raises, so logging response.text here was a NameError.
        o('SparkPost API Fail: %s' % e)
def convert_to_tiff(access_key, filename):
    """Convert ./tmp/<access_key>/<filename> into numbered G3 fax TIFF
    pages with ImageMagick's fax: output coder (200 dpi variant)."""
    from subprocess import check_output
    o('Convert %s to .tiff' % filename)
    file_prefix, file_extension = os.path.splitext(filename)
    workdir = "./tmp/" + access_key + "/"
    command = [
        "convert",
        "-density", "200 ",
        workdir + filename,
        "fax:" + workdir + file_prefix + ".%02d.tiff",
    ]
    output = check_output(command)
def convert_txt_to_ps(access_key, filename):
    """Typeset the job's text as PostScript with paps (Liberation Mono,
    12 cpi), writing the result to ./tmp/<access_key>/<filename>."""
    from subprocess import check_output
    o('Convert %s to .ps' % filename)
    file_prefix, file_extension = os.path.splitext(filename)
    workdir = "./tmp/" + access_key
    command = [
        "paps",
        "--font", "Liberation Mono",
        "--cpi", " 12",
        file_prefix,
    ]
    # paps emits PostScript on stdout; run it from the job's temp dir.
    output = check_output(command, cwd=workdir)
    with open(workdir + '/' + filename, 'wb') as out:
        out.write(output)
def convert_txt_to_ps(access_key, filename):
    """Typeset the job's text as PostScript with paps (Droid Sans Mono,
    12 cpi) and store it under ./tmp/<access_key>/."""
    from subprocess import check_output
    o('Convert %s to .ps' % filename)
    file_prefix, file_extension = os.path.splitext(filename)
    # paps prints PostScript on stdout; run it inside the job directory.
    ps_data = check_output(
        ["paps", "--font", "Droid Sans Mono", "--cpi", " 12", file_prefix],
        cwd="./tmp/" + access_key)
    destination = open('./tmp/' + access_key + '/' + filename, 'wb')
    destination.write(ps_data)
    destination.close()
def convert_to_tiff(access_key, filename, flatten=False):
    """Convert a stored upload into per-page G3 fax TIFFs via ImageMagick,
    optionally flattening transparency onto a white background first."""
    from subprocess import check_output
    o('Convert %s to .tiff' % filename)
    file_prefix, file_extension = os.path.splitext(filename)
    if flatten:
        extra = ["-background", "White", "-alpha", "Background"]
    else:
        extra = []
    command = (["convert", "-density", "400 ", "-resize", "1760x2200 "]
               + extra
               + ["./tmp/" + access_key + "/" + filename,
                  "fax:./tmp/" + access_key + "/" + file_prefix
                  + ".%02d.tiff"])
    output = check_output(command)
def send_email(message, account=None):
    """Send *message* through the Mandrill API, optionally addressed to
    *account*. Returns False when MANDRILL_API_KEY is not configured;
    Mandrill errors are logged, never raised.
    """
    if not os.environ.get('MANDRILL_API_KEY'):
        return False
    import mandrill
    client = mandrill.Mandrill(os.environ.get('MANDRILL_API_KEY'))
    message['from_email'] = os.environ.get('EMAIL_FROM')
    message['from_name'] = os.environ.get('EMAIL_FROM_NAME')
    if account:
        if account.first_name and account.last_name:
            message['to'] = [{
                'email': account.email,
                'name': account.first_name + ' ' + account.last_name
            }]
        else:
            message['to'] = [{'email': account.email}]
    try:
        # COMPAT FIX: `async` is a reserved word on modern Python, so
        # `async=True` is a SyntaxError there; passing it via **kwargs
        # keeps the identical Mandrill call working on Python 2 and 3.
        client.messages.send(message=message, **{'async': True})
    except mandrill.Error as e:
        # COMPAT FIX: `except X, e` is Python-2-only; `as` works on both.
        o('A mandrill error occurred: %s - %s' % (e.__class__, e))
def bootstrap():
    """
    Implicitly creates a user account and logs them in by processing a
    Stripe payment and email. If the email address belongs to a registered
    account, it requires the account password to authorize the payment and
    login attempt.
    """
    import json
    import stripe
    import sys
    import traceback
    from library.mailer import email_payment
    from datetime import datetime
    stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
    account_id = Account.authorize(request.values.get('api_key'))
    ip = fix_ip(request.headers.get('x-forwarded-for', request.remote_addr))
    if request.method == 'POST':
        v = request.values.get
        # Look up any existing account registered under this email.
        emails = Account.query.filter_by(email=v('email'))
        account = emails.first()
        o("Received bootstrap payment login: %s" % v('email'))
        # A logged-in user may not pay against someone else's account.
        if account_id and account != None and account.id != account_id:
            o("Account exists but user is logged in as someone else. Error.")
            return jsonify(api_error('ACCOUNTS_LOGIN_ERROR')), 401
        # Existing account + wrong password + no API session: reject.
        if account != None and account.password != password_hash(v('password'))\
           and not account_id:
            o("Account exists but password mismatch. Erroring out.")
            return jsonify(api_error('ACCOUNTS_LOGIN_ERROR')), 401
        temporary_password = None
        if account == None:
            # First-time payer: create the account with a short random
            # temporary password that is returned to the caller below.
            o("Creating account with temporary password.")
            temporary_password = random_hash(v('email'))[:8]
            data = {'email': v('email'), 'password': temporary_password}
            try:
                account = Account(**data)
            except ValidationError, err:
                return jsonify(api_error(err.ref)), 400
            try:
                account.validate()
                db.session.add(account)
            except IntegrityError:
                return jsonify(api_error('ACCOUNTS_CREATE_FAIL')), 400
        o("Verifying payment with Stripe API.")
        failed = False
        try:
            # Stripe amounts are integer cents.
            payment = stripe.Charge.create(amount=int(
                float(v('amount')) * 100), currency="usd",
                source=v('stripe_token'),
                description="Bootstrap payment")
        except:
            o("STRIPE UNEXPECTED ERROR:", sys.exc_info()[0])
            failed = True
            # Keep the traceback so the failure row below can store it.
            payment = {'_DEBUG': traceback.format_exc()}
        o(payment)
        if not failed and payment and payment.status == "succeeded":
            o("Payment success.")
            # Commit first so the (possibly new) account has an id for
            # the transaction row.
            db.session.commit()
            data = {
                'account_id': account.id,
                'amount': v('amount'),
                'source': 'stripe',
                'source_id': payment.id,
                'ip_address': ip,
                'initial_balance': account.credit,
                'trans_type': 'payment'
            }
            trans = Transaction(**data)
            db.session.add(trans)
            db.session.commit()
            o("Adding credit to account.")
            charged = float(payment.amount) / float(100)
            account.add_credit(charged)
            email_payment(account, charged, trans.id, payment.source.last4,
                          temporary_password)
        else:
            o("--- PAYMENT FAIL. LOGGING INFO ---")
            # Drop the uncommitted account/etc. before logging the
            # failed payment on its own.
            db.session.expunge_all()
            data = {
                'amount': v('amount'),
                'account_id': account.id,
                'source': 'stripe',
                'debug': json.dumps(payment),
                'ip_address': ip,
                'payment_type': 'bootstrap'
            }
            failed_payment = FailedPayment(**data)
            db.session.add(failed_payment)
            db.session.commit()
            return jsonify(api_error('ACCOUNTS_PAYMENT_FAIL')), 400
        result = account.public_data()
        if temporary_password:
            # Surface the generated password so the new user can log in.
            result['temporary_password'] = temporary_password
        return jsonify(result)
def create():
    """Creates a new outgoing fax.

    Expects a POST carrying either a multipart 'file' upload or a 'body'
    form field; creates the Job row, then queues initial_process on the
    high-priority Redis queue. Returns the job's public data as JSON,
    or an API error (401 unauthorized / 400 validation or no attachment).
    """
    account_id = Account.authorize(request.values.get('api_key'))
    if account_id == None:
        return jsonify(api_error('API_UNAUTHORIZED')), 401
    ip = fix_ip(request.headers.get('x-forwarded-for', request.remote_addr))
    if request.method == 'POST':
        # Guarded lookup: a POST without a multipart file field must not
        # blow up — body-only faxes are legal.
        if request.files and 'file' in request.files:
            uploaded_file = request.files['file']
        else:
            uploaded_file = None
        v = request.values.get
        if uploaded_file or v('body'):
            data = {
                'account_id': account_id,
                'ip_address': ip,
                'destination': v('destination'),
                'send_authorized': v('send_authorized', 0),
                'cover': v('cover', 0),
                'cover_name': v('cover_name'),
                'cover_address': v('cover_address'),
                'cover_city': v('cover_city'),
                'cover_state': v('cover_state'),
                'cover_zip': v('cover_zip'),
                'cover_country': v('cover_country'),
                'cover_phone': v('cover_phone'),
                'cover_email': v('cover_email'),
                'cover_company': v('cover_company'),
                'cover_to_name': v('cover_to_name'),
                'cover_cc': v('cover_cc'),
                'cover_subject': v('cover_subject'),
                'cover_status': v('cover_status', 'review'),
                'cover_comments': v('cover_comments'),
                'callback_url': v('callback_url')
            }
            if uploaded_file:
                data['filename'] = uploaded_file.filename
            else:
                data['body'] = v('body')
            o(data)
            try:
                job = Job(**data)
                job.validate()
                job.determine_international()
            except ValidationError, err:
                return jsonify(api_error(err.ref)), 400
            # Commit before queueing so the worker can load the job row.
            db.session.add(job)
            db.session.commit()
            if uploaded_file:
                binary = uploaded_file.stream.read()
            else:
                # Normalize CRLF; the body travels to the worker as UTF-8.
                binary = job.body.replace("\r\n", "\n").encode('utf-8')
            redis_conn = Redis.from_url(current_app.config['REDIS_URI'])
            q = Queue('high', connection=redis_conn)
            q.enqueue_call(func=initial_process, args=(job.id, binary),
                           timeout=300)
            return jsonify(job.public_data())
        else:
            return jsonify(api_error("JOBS_NO_ATTACHMENT")), 400
def charge_subscribers():
    """Cron job: bill every incoming fax number that is due ($6 per 30
    days). Numbers whose accounts cannot pay are flagged for deletion on
    the first miss and deleted (with customer email) on the second.
    Returns the string "lulz" when the sweep completes."""
    o("QUERYING INCOMING FAX NUMBERS FOR PAYMENT PROCESSING!")
    o(" ")
    conn = psycopg2.connect(os.environ.get('DATABASE_URI'))
    # Numbers not touched in 7 days and not billed in 30 days are due.
    query = (
        "SELECT id, account_id, fax_number, flagged_for_deletion, "
        "    last_billed, create_date, mod_date "
        "FROM incoming_number "
        "WHERE ( "
        "    mod_date IS NULL "
        "    OR "
        "    mod_date < now() - \'7 days\'::interval "
        "  ) "
        "AND ( "
        "    last_billed IS NULL "
        "    OR "
        "    last_billed < now() - \'30 days\'::interval "
        "  ) ")
    cursor = conn.cursor()
    cursor.execute(query)
    for row in cursor:
        id = row[0]
        account_id = row[1]
        fax_number = row[2]
        flagged_for_deletion = row[3]
        last_billed = row[4]
        account = session.query(Account).get(account_id)
        incoming_number = session.query(IncomingNumber).get(id)
        account_status = "SUCCESS"
        o("Fax number %s for account %s" % (fax_number, account_id))
        # Try to top up accounts that cannot cover the $6 charge.
        if account.credit < 6 and not account.allow_overflow:
            o(" - account credit below threshold")
            if account.stripe_card and account.auto_recharge:
                if not auto_recharge(account, "localhost", session) == True:
                    account_status = "DECLINE"
                    o(" - CARD DECLINED :(")
                else:
                    o(" - payment succeeded :)")
            else:
                account_status = "NO_FUNDS"
                o(" - AUTO-CHARGED DISABLED AND NO FUNDS IN ACCOUNT :(")
        if not account_status == "SUCCESS" and flagged_for_deletion:
            # Second consecutive miss: drop the number for real.
            o(" - number is already marked for deletion >_<")
            session.delete(incoming_number)
            session.commit()
            if not delete_number_from_phaxio(fax_number):
                email_admin("Failed to delete Phaxio number: %s" % fax_number)
            email_deleted_number(account)
            o(" - deleted number and emailed customer.")
        elif not account_status == "SUCCESS" and not flagged_for_deletion:
            # First miss: flag the number and warn the customer.
            incoming_number.mod_date = datetime.now()
            incoming_number.flagged_for_deletion = 1
            session.commit()
            email_pending_deletion_warning(account, account_status,
                                           fax_number)
            o(" - flagged number for deletion, emailed warning to customer.")
        else:
            o(" - account successfully charged. hooray!")
            incoming_number.mod_date = datetime.now()
            incoming_number.last_billed = datetime.now()
            incoming_number.flagged_for_deletion = 0
            session.commit()
            # Record the $6 debit as a negative-amount transaction.
            data = {
                'account_id': account.id,
                'amount': -6.00,
                'source': "phaxio",
                'source_id': fax_number,
                'ip_address': "localhost",
                'initial_balance': account.credit,
                'trans_type': "incoming_autopay"
            }
            trans = Transaction(**data)
            session.add(trans)
            session.commit()
            account.subtract_credit(6, session)
        o(" ")
    conn.commit()
    conn.close()
    email_admin("YAY!", "Payments cron job success!")
    o("ALL DONE!")
    return "lulz"
def bootstrap():
    """
    Implicitly creates a user account and logs them in by processing a
    Stripe payment and email. If the email address belongs to a registered
    account, it requires the account password to authorize the payment and
    login attempt.
    """
    import json
    import stripe
    import sys
    import traceback
    from library.mailer import email_payment
    from datetime import datetime
    stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
    account_id = Account.authorize(request.values.get('api_key'))
    ip = fix_ip(request.headers.get('x-forwarded-for', request.remote_addr))
    if request.method == 'POST':
        v = request.values.get
        # Find any account already registered under this email address.
        emails = Account.query.filter_by(email=v('email'))
        account = emails.first()
        o("Received bootstrap payment login: %s" % v('email'))
        # Reject when the API session belongs to a different account.
        if account_id and account != None and account.id != account_id:
            o("Account exists but user is logged in as someone else. Error.")
            return jsonify(api_error('ACCOUNTS_LOGIN_ERROR')), 401
        # Reject a wrong password unless an API session authorizes it.
        if account != None and account.password != password_hash(v('password'))\
           and not account_id:
            o("Account exists but password mismatch. Erroring out.")
            return jsonify(api_error('ACCOUNTS_LOGIN_ERROR')), 401
        temporary_password = None
        if account == None:
            # Brand-new payer: create the account with an 8-char random
            # temporary password, returned to the caller at the end.
            o("Creating account with temporary password.")
            temporary_password = random_hash(v('email'))[:8]
            data = {
                'email': v('email'),
                'password': temporary_password
            }
            try:
                account = Account(**data)
            except ValidationError, err:
                return jsonify(api_error(err.ref)), 400
            try:
                account.validate()
                db.session.add(account)
            except IntegrityError:
                return jsonify(api_error('ACCOUNTS_CREATE_FAIL')), 400
        o("Verifying payment with Stripe API.")
        failed = False
        try:
            # Stripe takes the amount in integer cents.
            payment = stripe.Charge.create(
                amount=int(float(v('amount')) * 100),
                currency="usd",
                source=v('stripe_token'),
                description="Bootstrap payment"
            )
        except:
            o("STRIPE UNEXPECTED ERROR:", sys.exc_info()[0])
            failed = True
            # Preserve the traceback for the FailedPayment row below.
            payment = {'_DEBUG': traceback.format_exc()}
        o(payment)
        if not failed and payment and payment.status == "succeeded":
            o("Payment success.")
            # Commit so a freshly created account has an id to reference.
            db.session.commit()
            data = {
                'account_id': account.id,
                'amount': v('amount'),
                'source': 'stripe',
                'source_id': payment.id,
                'ip_address': ip,
                'initial_balance': account.credit,
                'trans_type': 'payment'
            }
            trans = Transaction(**data)
            db.session.add(trans)
            db.session.commit()
            o("Adding credit to account.")
            charged = float(payment.amount) / float(100)
            account.add_credit(charged)
            email_payment(account, charged, trans.id, payment.source.last4,
                          temporary_password)
        else:
            o("--- PAYMENT FAIL. LOGGING INFO ---")
            # Discard the pending session state before logging the
            # failed payment on its own.
            db.session.expunge_all()
            data = {
                'amount': v('amount'),
                'account_id': account.id,
                'source': 'stripe',
                'debug': json.dumps(payment),
                'ip_address': ip,
                'payment_type': 'bootstrap'
            }
            failed_payment = FailedPayment(**data)
            db.session.add(failed_payment)
            db.session.commit()
            return jsonify(api_error('ACCOUNTS_PAYMENT_FAIL')), 400
        result = account.public_data()
        if temporary_password:
            # Hand the generated password back so the user can log in.
            result['temporary_password'] = temporary_password
        return jsonify(result)
file_prefix, file_extension = os.path.splitext(f) if file_extension == '.tiff': num_pages = num_pages + 1 job.status = 'processing' job.mod_date = datetime.now() job.num_pages = num_pages job.compute_cost() session.commit() ######################################################################## # OPTIONAL AMAZON S3 UPLOAD PHASE ######################################################################## if os.environ.get('AWS_STORAGE') == "on": o('Connecting to S3') ############################################## try: conn = S3Connection(os.environ.get('AWS_ACCESS_KEY'), os.environ.get('AWS_SECRET_KEY')) bucket = conn.get_bucket(os.environ.get('AWS_S3_BUCKET')) except: return fail('JOBS_CONNECT_S3_FAIL', job, db) for f in files: try: file_prefix, file_extension = os.path.splitext(f) if file_extension == '.tiff': o('Uploading to S3: ' + f) ############################# k = Key(bucket) k.key = 'fax/' + job.access_key + '/' + f
def initial_process(id, data=None): from boto.s3.connection import S3Connection from boto.s3.key import Key from subprocess import call, check_output, CalledProcessError from os.path import isfile, join import redis from rq import Worker, Queue, Connection from poster.encode import multipart_encode from poster.streaminghttp import register_openers import urllib2 import requests job = session.query(Job).get(id) job.mod_date = datetime.now() job.status = 'uploading' session.commit() ######################################################################## # G3 TIFF CONVERSION PHASE ######################################################################## path = './tmp/' + job.access_key o('Creating temporary directory: %s' % path) ########################### try: if not os.path.exists(path): os.makedirs(path) except: return fail('JOBS_CREATE_DIR_FAIL', job, db) if job.filename: file_prefix, file_extension = os.path.splitext(job.filename) elif job.body: file_extension = '.txt' else: return fail('JOBS_NO_ATTACHMENT_FOUND', job, db) if file_extension == '.docx' or file_extension == '.doc': if not os.environ.get('SERVEOFFICE2PDF_URL'): return fail('JOBS_NO_SERVEOFFICE2PDF', job, db) try: job.filename = save_local_file(job.access_key, job.filename, data) except: return fail('JOBS_LOCAL_SAVE_FAIL', job, db) o('Calling ServeOffice2PDF to convert DOCX to PDF.') ############### office_url = os.environ.get('SERVEOFFICE2PDF_URL').strip() files = {"data": open('%s/%s' % (path, job.filename), 'rb')} params = {'filename': job.filename} response = requests.post(office_url, files=files, data=params) if not response.status_code == 200: return fail('JOBS_PDF_CONVERT_FAIL', job, db) o('Successfully converted file to PDF. 
Now converting to tiff.') ### pdf_file = open('%s/%s.pdf' % (path, job.filename), 'wb') pdf_file.write(response.content) pdf_file.close() try: convert_to_tiff(job.access_key, job.filename + '.pdf', True) except CalledProcessError, e: return fail('JOBS_IMG_CONVERT_FAIL', job, db, str(e))
def send_fax(id): from boto.s3.connection import S3Connection from boto.s3.key import Key from datetime import date from subprocess import check_output, CalledProcessError, STDOUT import stripe import traceback import json from library.mailer import email_recharge_payment, email_success from rq import Worker device = Worker.MODEM_DEVICE caller_id = Worker.CALLER_ID stripe.api_key = os.environ.get('STRIPE_SECRET_KEY') job = session.query(Job).get(id) if not job.status == 'ready' and not job.status == 'queued': return fail('JOBS_CANNOT_SEND_NOW', job, db) if job.data_deleted: return fail('JOBS_FAIL_DATA_DELETED', job, db) cost = job.cost if not job.cover else job.cost + job.cover_cost ######################################################################## # MAKE SURE THE CUSTOMER ACTUALLY HAS MONEY PHASE ######################################################################## if os.environ.get('REQUIRE_PAYMENTS') == 'on': if job.account.credit - cost < 0 and not job.account.allow_overflow: if job.account.stripe_card and job.account.auto_recharge: try: charge = stripe.Charge.create( amount=1000, currency="usd", customer=job.account.stripe_token, description="Auto-recharging account %s"% job.account.id ) data = { 'account_id': job.account.id, 'amount': 10, 'source': 'stripe', 'source_id': charge["id"], 'job_id': job.id, 'job_destination': job.destination, 'ip_address': job.ip_address, 'initial_balance': job.account.credit, 'trans_type': 'auto_recharge' } trans = Transaction(**data) session.add(trans) session.commit() job.account.add_credit(10, session) email_recharge_payment(job.account, 10, trans.id, charge.source.last4) except: payment = {'_DEBUG': traceback.format_exc()} data = { 'amount': 10, 'account_id': job.account.id, 'source': 'stripe', 'debug': json.dumps(payment), 'ip_address': job.ip_address, 'payment_type': 'auto_recharge' } failed_payment = FailedPayment(**data) session.add(failed_payment) session.commit() # JL TODO ~ Notify customer that the card was 
declined return fail('JOBS_CARD_DECLINED', job, db) else: return fail('JOBS_INSUFFICIENT_CREDIT', job, db) job.mod_date = datetime.now() job.start_date = datetime.now() job.attempts = job.attempts + 1 job.device = device job.status = 'started' session.commit() files_to_send = [] ######################################################################## # COVER SHEET GENERATION PHASE ######################################################################## path = './tmp/' + job.access_key o('Touching temporary directory: %s' % path) ########################### try: if not os.path.exists(path): os.makedirs(path) except: return fail('JOBS_CREATE_DIR_FAIL', job, db) if job.cover: o('Generating cover sheet') ######################################## try: o('Generating cover.png') ###################################### cmd = ["convert", "-density", "400", "-flatten", "./media/cover_sheets/default.pdf", "-gravity", "None"] v = 300 if job.cover_name: cmd.extend(["-annotate", "+468+%s" % v, job.cover_name]) cmd.extend(["-annotate", "+2100+1215", job.cover_name]) v = v + 80 if job.cover_address: cmd.extend(["-annotate", "+468+%s" % v, job.cover_address]) v = v + 80 if job.cover_city or job.cover_state or job.cover_zip: cmd.extend(["-annotate", "+468+%s" % v, "%s, %s %s" % (job.cover_city, job.cover_state, job.cover_zip)]) v = v + 80 if job.cover_country: cmd.extend(["-annotate", "+468+%s" % v, job.cover_country]) v = v + 80 if job.cover_phone: cmd.extend(["-annotate", "+468+%s" % v, job.cover_phone]) v = v + 80 if job.cover_email: cmd.extend(["-annotate", "+468+%s" % v, job.cover_email]) v = v + 80 if job.cover_to_name: cmd.extend(["-annotate", "+800+1215", job.cover_to_name]) if job.cover_subject: cmd.extend(["-annotate", "+800+1340", job.cover_subject]) cmd.extend(["-annotate", "+2100+1340", "%s" % job.num_pages]) cmd.extend(["-annotate", "+800+1465", "%s" % date.today()]) if job.cover_cc: cmd.extend(["-annotate", "+2100+1465", job.cover_cc]) if "urgent" in job.cover_status: 
cmd.extend(["-annotate", "+473+1740", "X"]) if "review" in job.cover_status: cmd.extend(["-annotate", "+825+1740", "X"]) if "comment" in job.cover_status: cmd.extend(["-annotate", "+1285+1740", "X"]) if "reply" in job.cover_status: cmd.extend(["-annotate", "+1910+1740", "X"]) if "shred" in job.cover_status: cmd.extend(["-annotate", "+2420+1740", "X"]) cmd.extend([ "-pointsize", "11", "./tmp/" + job.access_key + "/cover.png" ]) output = check_output(cmd) except CalledProcessError, e: return fail('JOBS_COVER_MAIN_FAIL', job, db, str(e)) if job.cover_company: try: o('Generating company.png') ################################ cmd = ["convert", "-density", "400", "-gravity", "Center", "-background", "black", "-fill", "white", "-pointsize", "20", "-size", "1400x", "caption:%s" % job.cover_company, "-bordercolor", "black", "-border", "30", "./tmp/" + job.access_key + "/company.png"] output = check_output(cmd) except CalledProcessError, e: return fail('JOBS_COVER_COMPANY_FAIL', job, db, str(e)) try: o('Overlaying company.png on cover.png') ################### cmd = ["composite", "-density", "400", "-gravity", "NorthEast", "-geometry", "+300+200", "./tmp/" + job.access_key + "/company.png", "./tmp/" + job.access_key + "/cover.png", "./tmp/" + job.access_key + "/cover.png"] output = check_output(cmd) except CalledProcessError, e: return fail('JOBS_COVER_OVERLAY_FAIL', job, db, str(e))
def initial_process(id, data = None): from boto.s3.connection import S3Connection from boto.s3.key import Key from subprocess import call, check_output, CalledProcessError from os.path import isfile, join import redis from rq import Worker, Queue, Connection from poster.encode import multipart_encode from poster.streaminghttp import register_openers import urllib2 import requests job = session.query(Job).get(id) job.mod_date = datetime.now() job.status = 'uploading' session.commit() ######################################################################## # G3 TIFF CONVERSION PHASE ######################################################################## path = './tmp/' + job.access_key o('Creating temporary directory: %s' % path) ########################### try: if not os.path.exists(path): os.makedirs(path) except: return fail('JOBS_CREATE_DIR_FAIL', job, db) if job.filename: file_prefix, file_extension = os.path.splitext(job.filename) elif job.body: file_extension = '.txt' else: return fail('JOBS_NO_ATTACHMENT_FOUND', job, db) if file_extension == '.docx' or file_extension == '.doc': if not os.environ.get('SERVEOFFICE2PDF_URL'): return fail('JOBS_NO_SERVEOFFICE2PDF', job, db) try: save_local_file(job.access_key, job.filename, data) except: return fail('JOBS_LOCAL_SAVE_FAIL', job, db) o('Calling ServeOffice2PDF to convert DOCX to PDF.') ############### office_url = os.environ.get('SERVEOFFICE2PDF_URL').strip() files = {"data": open('%s/%s' % (path, job.filename), 'rb')} params = {'filename': job.filename} response = requests.post(office_url, files=files, data=params) if not response.status_code == 200: return fail('JOBS_PDF_CONVERT_FAIL', job, db) o('Successfully converted file to PDF. Now converting to tiff.') ### pdf_file = open('%s/%s.pdf' % (path, job.filename), 'wb') pdf_file.write(response.content) pdf_file.close() try: convert_to_tiff(job.access_key, job.filename + '.pdf') except CalledProcessError, e: return fail('JOBS_IMG_CONVERT_FAIL', job, db, str(e))
else: cost_per_page = float(os.environ.get('DEFAULT_COST_PER_PAGE', '0.06')) job.status = 'processing' job.mod_date = datetime.now() job.num_pages = num_pages job.cost = cost_per_page * num_pages job.cover_cost = cost_per_page session.commit() ######################################################################## # OPTIONAL AMAZON S3 UPLOAD PHASE ######################################################################## if os.environ.get('AWS_STORAGE') == "on": o('Connecting to S3') ############################################## try: conn = S3Connection(os.environ.get('AWS_ACCESS_KEY'), os.environ.get('AWS_SECRET_KEY')) bucket = conn.get_bucket(os.environ.get('AWS_S3_BUCKET')) except: return fail('JOBS_CONNECT_S3_FAIL', job, db) for f in files: try: file_prefix, file_extension = os.path.splitext(f) if file_extension == '.tiff': o('Uploading to S3: ' + f) ############################# k = Key(bucket) k.key = 'fax/' + job.access_key + '/' + f
def create(): """Creates a new outgoing fax""" account_id = Account.authorize(request.values.get('api_key')) if account_id == None: return jsonify(api_error('API_UNAUTHORIZED')), 401 ip = fix_ip(request.headers.get('x-forwarded-for', request.remote_addr)) if request.method == 'POST': uploaded_file = request.files['file'] v = request.values.get if uploaded_file or v('body'): data = { 'account_id': account_id, 'ip_address': ip, 'destination': v('destination'), 'send_authorized': v('send_authorized', 0), 'cover': v('cover', 0), 'cover_name': v('cover_name'), 'cover_address': v('cover_address'), 'cover_city': v('cover_city'), 'cover_state': v('cover_state'), 'cover_zip': v('cover_zip'), 'cover_country': v('cover_country'), 'cover_phone': v('cover_phone'), 'cover_email': v('cover_email'), 'cover_company': v('cover_company'), 'cover_to_name': v('cover_to_name'), 'cover_cc': v('cover_cc'), 'cover_subject': v('cover_subject'), 'cover_status': v('cover_status','review'), 'cover_comments': v('cover_comments'), 'callback_url': v('callback_url') } if uploaded_file: data['filename'] = uploaded_file.filename else: data['body'] = v('body') o(data) try: job = Job(**data); job.validate() except ValidationError, err: return jsonify(api_error(err.ref)), 400 db.session.add(job) db.session.commit() if uploaded_file: binary = uploaded_file.stream.read() else: binary = job.body.replace("\r\n", "\n").encode('utf-8') redis_conn = Redis.from_url(current_app.config['REDIS_URI']) q = Queue('high', connection=redis_conn) q.enqueue_call(func=initial_process, args=(job.id, binary), timeout=300) return jsonify(job.public_data()) else: return jsonify(api_error("JOBS_NO_ATTACHMENT")), 400
def send_fax(id):
    """Worker stage that prepares a 'ready'/'queued' job for transmission:
    verifies the account can pay (auto-recharging via Stripe if enabled),
    marks the job 'started', and renders the optional cover sheet with
    ImageMagick.

    NOTE(review): this excerpt appears to end after cover-sheet generation;
    files_to_send is built but never used here, and the actual modem send
    presumably continues in lines lost from this view -- confirm against VCS.

    :param id: primary key of the Job row to send
    """
    from boto.s3.connection import S3Connection
    from boto.s3.key import Key
    from datetime import date
    from subprocess import check_output, CalledProcessError, STDOUT
    import stripe
    import traceback
    import json
    from library.mailer import email_recharge_payment
    from rq import Worker

    # Worker-level configuration set on the RQ Worker class.
    device = Worker.MODEM_DEVICE
    caller_id = Worker.CALLER_ID

    stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')

    job = session.query(Job).get(id)

    # Only jobs that are staged and still have their data can be sent.
    if not job.status == 'ready' and not job.status == 'queued':
        return fail('JOBS_CANNOT_SEND_NOW', job, db)
    if job.data_deleted:
        return fail('JOBS_FAIL_DATA_DELETED', job, db)

    # Cover sheet pages are billed on top of the document cost.
    cost = job.cost if not job.cover else job.cost + job.cover_cost

    ########################################################################
    #   MAKE SURE THE CUSTOMER ACTUALLY HAS MONEY PHASE
    ########################################################################

    if os.environ.get('REQUIRE_PAYMENTS') == 'on':
        if job.account.credit - cost < 0 and not job.account.allow_overflow:
            if job.account.stripe_card and job.account.auto_recharge:
                # Try to top the account up by $10 (1000 cents) via Stripe.
                try:
                    charge = stripe.Charge.create(
                        amount=1000,
                        currency="usd",
                        customer=job.account.stripe_token,
                        description="Auto-recharging account %s" % job.account.id)
                    data = {
                        'account_id': job.account.id,
                        'amount': 10,
                        'source': 'stripe',
                        'source_id': charge["id"],
                        'job_id': job.id,
                        'job_destination': job.destination,
                        'ip_address': job.ip_address,
                        'initial_balance': job.account.credit,
                        'trans_type': 'auto_recharge'
                    }
                    trans = Transaction(**data)
                    session.add(trans)
                    session.commit()
                    job.account.add_credit(10, session)
                    # email_recharge_payment(job.account, 10, trans.id,
                    #     charge.source.last4)
                except:
                    # Charge failed: record a FailedPayment with the full
                    # traceback for later debugging, then fail the job.
                    payment = {'_DEBUG': traceback.format_exc()}
                    data = {
                        'amount': 10,
                        'account_id': job.account.id,
                        'source': 'stripe',
                        'debug': json.dumps(payment),
                        'ip_address': job.ip_address,
                        'payment_type': 'auto_recharge'
                    }
                    failed_payment = FailedPayment(**data)
                    session.add(failed_payment)
                    session.commit()
                    # JL TODO ~ Notify customer that the card was declined
                    return fail('JOBS_CARD_DECLINED', job, db)
            else:
                return fail('JOBS_INSUFFICIENT_CREDIT', job, db)

    # Funds confirmed (or payments disabled): mark the attempt as started.
    job.mod_date = datetime.now()
    job.start_date = datetime.now()
    job.attempts = job.attempts + 1
    job.device = device
    job.status = 'started'
    session.commit()

    files_to_send = []

    ########################################################################
    #   COVER SHEET GENERATION PHASE
    ########################################################################

    path = './tmp/' + job.access_key
    o('Touching temporary directory: %s' % path)

    try:
        if not os.path.exists(path):
            os.makedirs(path)
    except:
        return fail('JOBS_CREATE_DIR_FAIL', job, db)

    if job.cover:
        o('Generating cover sheet')
        try:
            o('Generating cover.png')
            # Annotate the stock cover sheet PDF with the sender/recipient
            # fields; v tracks the vertical offset of the "from" block and
            # advances 80px per populated line.
            cmd = [
                "convert", "-density", "400", "-flatten",
                "./media/cover_sheets/default.pdf", "-gravity", "None"
            ]
            v = 300
            if job.cover_name:
                cmd.extend(["-annotate", "+468+%s" % v, job.cover_name])
                cmd.extend(["-annotate", "+2100+1215", job.cover_name])
                v = v + 80
            if job.cover_address:
                cmd.extend(["-annotate", "+468+%s" % v, job.cover_address])
                v = v + 80
            if job.cover_city or job.cover_state or job.cover_zip:
                cmd.extend([
                    "-annotate", "+468+%s" % v,
                    "%s, %s %s" % (job.cover_city, job.cover_state,
                        job.cover_zip)
                ])
                v = v + 80
            if job.cover_country:
                cmd.extend(["-annotate", "+468+%s" % v, job.cover_country])
                v = v + 80
            if job.cover_phone:
                cmd.extend(["-annotate", "+468+%s" % v, job.cover_phone])
                v = v + 80
            if job.cover_email:
                cmd.extend(["-annotate", "+468+%s" % v, job.cover_email])
                v = v + 80
            # Fixed-position recipient/subject/date fields.
            if job.cover_to_name:
                cmd.extend(["-annotate", "+800+1215", job.cover_to_name])
            if job.cover_subject:
                cmd.extend(["-annotate", "+800+1340", job.cover_subject])
            cmd.extend(["-annotate", "+2100+1340", "%s" % job.num_pages])
            cmd.extend(["-annotate", "+800+1465", "%s" % date.today()])
            if job.cover_cc:
                cmd.extend(["-annotate", "+2100+1465", job.cover_cc])
            # Tick the status checkboxes mentioned in cover_status.
            if "urgent" in job.cover_status:
                cmd.extend(["-annotate", "+473+1740", "X"])
            if "review" in job.cover_status:
                cmd.extend(["-annotate", "+825+1740", "X"])
            if "comment" in job.cover_status:
                cmd.extend(["-annotate", "+1285+1740", "X"])
            if "reply" in job.cover_status:
                cmd.extend(["-annotate", "+1910+1740", "X"])
            if "shred" in job.cover_status:
                cmd.extend(["-annotate", "+2420+1740", "X"])
            cmd.extend(
                ["-pointsize", "11", "./tmp/" + job.access_key + "/cover.png"])
            output = check_output(cmd)
        except CalledProcessError, e:
            return fail('JOBS_COVER_MAIN_FAIL', job, db, str(e))

        if job.cover_company:
            # Render the company name as a white-on-black banner...
            try:
                o('Generating company.png')
                cmd = [
                    "convert", "-density", "400", "-gravity", "Center",
                    "-background", "black", "-fill", "white",
                    "-pointsize", "20", "-size", "1400x",
                    "caption:%s" % job.cover_company,
                    "-bordercolor", "black", "-border", "30",
                    "./tmp/" + job.access_key + "/company.png"
                ]
                output = check_output(cmd)
            except CalledProcessError, e:
                return fail('JOBS_COVER_COMPANY_FAIL', job, db, str(e))
            # ...then composite it onto the cover sheet's top-right corner.
            try:
                o('Overlaying company.png on cover.png')
                cmd = [
                    "composite", "-density", "400", "-gravity", "NorthEast",
                    "-geometry", "+300+200",
                    "./tmp/" + job.access_key + "/company.png",
                    "./tmp/" + job.access_key + "/cover.png",
                    "./tmp/" + job.access_key + "/cover.png"
                ]
                output = check_output(cmd)
            except CalledProcessError, e:
                return fail('JOBS_COVER_OVERLAY_FAIL', job, db, str(e))
def charge_subscribers():
    """Cron entry point that bills every incoming fax number due for its
    monthly $6 charge.

    Numbers are due when their mod_date is older than 7 days AND their
    last_billed is older than 30 days (or either is NULL). For each due
    number: if the account lacks funds and cannot auto-recharge, the number
    is first flagged for deletion (with a warning email) and, on the next
    failing pass, deleted outright; otherwise a $6 'incoming_autopay'
    Transaction is recorded and the credit deducted.

    Returns a throwaway string (the caller ignores it).
    """
    o("QUERYING INCOMING FAX NUMBERS FOR PAYMENT PROCESSING!")
    o(" ")

    # Raw psycopg2 connection for the due-numbers query; row updates below
    # still go through the SQLAlchemy session.
    conn = psycopg2.connect(os.environ.get('DATABASE_URI'))

    query = (
        "SELECT id, account_id, fax_number, flagged_for_deletion, "
        " last_billed, create_date, mod_date "
        "FROM incoming_number "
        "WHERE ( "
        " mod_date IS NULL "
        " OR "
        " mod_date < now() - \'7 days\'::interval "
        " ) "
        "AND ( "
        " last_billed IS NULL "
        " OR "
        " last_billed < now() - \'30 days\'::interval "
        " ) "
    )
    cursor = conn.cursor()
    cursor.execute(query)

    for row in cursor:
        id = row[0]
        account_id = row[1]
        fax_number = row[2]
        flagged_for_deletion = row[3]
        last_billed = row[4]

        # Re-fetch ORM objects for the row so updates go through the session.
        account = session.query(Account).get(account_id)
        incoming_number = session.query(IncomingNumber).get(id)

        account_status = "SUCCESS"

        o("Fax number %s for account %s" % (fax_number, account_id))

        # The number costs $6/month; try to secure funds before billing.
        if account.credit < 6 and not account.allow_overflow:
            o(" - account credit below threshold")
            if account.stripe_card and account.auto_recharge:
                if not auto_recharge(account, "localhost", session) == True:
                    account_status = "DECLINE"
                    o(" - CARD DECLINED :(")
                else:
                    o(" - payment succeeded :)")
            else:
                account_status = "NO_FUNDS"
                o(" - AUTO-CHARGED DISABLED AND NO FUNDS IN ACCOUNT :(")

        if not account_status == "SUCCESS" and flagged_for_deletion:
            # Second consecutive failure: drop the number for real.
            o(" - number is already marked for deletion >_<")
            session.delete(incoming_number)
            session.commit()
            if not delete_number_from_phaxio(fax_number):
                email_admin("Failed to delete Phaxio number: %s" % fax_number)
            email_deleted_number(account)
            o(" - deleted number and emailed customer.")
        elif not account_status == "SUCCESS" and not flagged_for_deletion:
            # First failure: flag the number and warn the customer.
            incoming_number.mod_date = datetime.now()
            incoming_number.flagged_for_deletion = 1
            session.commit()
            email_pending_deletion_warning(account, account_status, fax_number)
            o(" - flagged number for deletion, emailed warning to customer.")
        else:
            # Funds available: bill the $6 and clear any deletion flag.
            # NOTE(review): this log string was split across lines in the
            # extracted source; single space reconstructed -- verify.
            o(" - account successfully charged. hooray!")
            incoming_number.mod_date = datetime.now()
            incoming_number.last_billed = datetime.now()
            incoming_number.flagged_for_deletion = 0
            session.commit()
            data = {
                'account_id': account.id,
                'amount': -6.00,
                'source': "phaxio",
                'source_id': fax_number,
                'ip_address': "localhost",
                'initial_balance': account.credit,
                'trans_type': "incoming_autopay"
            }
            trans = Transaction(**data)
            session.add(trans)
            session.commit()
            account.subtract_credit(6, session)
        o(" ")

    conn.commit()
    conn.close()

    # NOTE(review): two-argument call; elsewhere email_admin is called with
    # one argument -- confirm the helper's signature.
    email_admin("YAY!", "Payments cron job success!")
    o("ALL DONE!")
    return "lulz"