Example #1
1
 def onchange_company_id(self, cr, uid, ids, company_id, context=None):
     """Onchange handler: refresh all company-dependent wizard fields.

     Returns ``{"value": values}``.  When no company is selected only
     ``currency_id`` is cleared; otherwise ``values`` is rebuilt with the
     company's currency, chart-of-accounts / fiscal-year status, default
     journals and sequences per journal type, and the default sale/purchase
     taxes stored in ``ir.values``.
     """
     # update related fields
     values = {}
     values["currency_id"] = False
     if company_id:
         company = self.pool.get("res.company").browse(cr, uid, company_id, context=context)
         # configured = not listed among the unconfigured companies
         has_chart_of_accounts = company_id not in self.pool.get("account.installer").get_unconfigured_cmp(cr, uid)
         # is there a fiscal year that covers today's date for this company?
         fiscalyear_count = self.pool.get("account.fiscalyear").search_count(
             cr,
             uid,
             [
                 ("date_start", "<=", time.strftime("%Y-%m-%d")),
                 ("date_stop", ">=", time.strftime("%Y-%m-%d")),
                 ("company_id", "=", company_id),
             ],
         )
         date_start, date_stop, period = self._get_default_fiscalyear_data(cr, uid, company_id, context=context)
         values = {
             "expects_chart_of_accounts": company.expects_chart_of_accounts,
             "currency_id": company.currency_id.id,
             "paypal_account": company.paypal_account,
             "company_footer": company.rml_footer,
             "has_chart_of_accounts": has_chart_of_accounts,
             "has_fiscal_year": bool(fiscalyear_count),
             "chart_template_id": False,
             "tax_calculation_rounding_method": company.tax_calculation_rounding_method,
             "date_start": date_start,
             "date_stop": date_stop,
             "period": period,
         }
         # update journals and sequences
         # reset every <type>_journal_id / _sequence_prefix / _sequence_next field first
         for journal_type in ("sale", "sale_refund", "purchase", "purchase_refund"):
             for suffix in ("_journal_id", "_sequence_prefix", "_sequence_next"):
                 values[journal_type + suffix] = False
         journal_obj = self.pool.get("account.journal")
         journal_ids = journal_obj.search(cr, uid, [("company_id", "=", company_id)])
         for journal in journal_obj.browse(cr, uid, journal_ids):
             if journal.type in ("sale", "sale_refund", "purchase", "purchase_refund"):
                 values.update(
                     {
                         journal.type + "_journal_id": journal.id,
                         journal.type + "_sequence_prefix": journal.sequence_id.prefix,
                         journal.type + "_sequence_next": journal.sequence_id.number_next,
                     }
                 )
         # update taxes
         # company-scoped product tax defaults live in ir.values
         ir_values = self.pool.get("ir.values")
         taxes_id = ir_values.get_default(cr, uid, "product.product", "taxes_id", company_id=company_id)
         supplier_taxes_id = ir_values.get_default(
             cr, uid, "product.product", "supplier_taxes_id", company_id=company_id
         )
         values.update(
             {
                 # ir.values may return a list of ids; keep only the first
                 "default_sale_tax": isinstance(taxes_id, list) and taxes_id[0] or taxes_id,
                 "default_purchase_tax": isinstance(supplier_taxes_id, list)
                 and supplier_taxes_id[0]
                 or supplier_taxes_id,
             }
         )
     return {"value": values}
def main():
    """Delete MySQL snapshot backups older than 20 days (Python 2 script).

    Walks ``/mfs/backup/mysql/*/snapshots/*``, computes each entry's age in
    whole days from its ctime (both sides truncated to UTC calendar dates)
    and removes trees older than 20 days, accumulating the freed size.

    NOTE(review): ``dir`` shadows the builtin; the leading ``pass`` before
    ``shutil.rmtree`` is a leftover no-op; ``/ 1024 ** 3`` is integer
    division under Python 2, so sizes below 1 GiB count as 0.
    """
    dir = "/mfs/backup/mysql/"
    pattern = os.path.join(dir, "*/snapshots/*")
    # pattern = os.path.join(dir, '*/binlogs/*')
    # pattern = os.path.join('/mfs/user/dba/recovery/*')

    total_size = 0
    for i in glob.glob(pattern):
        # entry's ctime truncated to a UTC date
        info = os.stat(i)
        t1 = time.gmtime(info.st_ctime)
        t2 = time.strftime("%Y-%m-%d", t1)
        year, month, day = t2.split("-")
        time1 = datetime.datetime(int(year), int(month), int(day))

        # "now" truncated to a UTC date the same way
        m1 = time.gmtime()
        m2 = time.strftime("%Y-%m-%d", m1)
        year, month, day = m2.split("-")
        time2 = datetime.datetime(int(year), int(month), int(day))

        days = (time2 - time1).days
        # print  time1, time2, days

        if days > 20:
            total_size = total_size + get_directory_size(i) / 1024 ** 3
            print i, time1, total_size
            try:
                pass
                shutil.rmtree(i)
                # os.remove(i)
            except Exception as exc:
                # best-effort delete: report and continue with the next entry
                print i, exc
Example #3
1
 def test_strftime(self):
     tt = time.gmtime(self.t)
     for directive in (
         "a",
         "A",
         "b",
         "B",
         "c",
         "d",
         "H",
         "I",
         "j",
         "m",
         "M",
         "p",
         "S",
         "U",
         "w",
         "W",
         "x",
         "X",
         "y",
         "Y",
         "Z",
         "%",
     ):
         format = " %" + directive
         try:
             time.strftime(format, tt)
         except ValueError:
             self.fail("conversion specifier: %r failed." % format)
Example #4
1
    def create_statement(self, cr, uid, line_invoice, partner, amount, journal, date_bank=None, account_id=None):
        """Create a one-line bank statement, reconcile the line and return
        the statement's resulting move lines.

        A positive *amount* becomes the credit, a negative one the debit;
        the counterpart is linked to *line_invoice* when one is given.
        """
        statement_date = date_bank or time.strftime("%Y") + "-07-01"
        bank_stmt_id = self.acc_bank_stmt_model.create(
            cr, uid, {"journal_id": journal, "date": statement_date}
        )

        bank_stmt_line_id = self.acc_bank_stmt_line_model.create(
            cr,
            uid,
            {
                "name": "payment",
                "statement_id": bank_stmt_id,
                "partner_id": partner,
                "amount": amount,
                "date": statement_date,
            },
        )

        counterpart = {
            "credit": amount if amount > 0 else 0,
            "debit": -amount if amount < 0 else 0,
            "name": (line_invoice.name if line_invoice else None) or "cash flow",
        }
        if line_invoice:
            counterpart["counterpart_move_line_id"] = line_invoice.id
        if account_id:
            counterpart["account_id"] = account_id

        self.acc_bank_stmt_line_model.process_reconciliation(cr, uid, bank_stmt_line_id, [counterpart])

        return self.acc_bank_stmt_model.browse(cr, uid, bank_stmt_id).move_line_ids
Example #5
1
 def timeout(self):
     """Qt timer slot (Python 2): flash the two status labels, then poll
     the docserver table for PENDING requests and hand them to workers.
     """
     # alternate the two label styles so the pair appears to flash
     self.label1.setStyleSheet(self.styles[self.flag])
     self.flag = not self.flag
     self.label2.setStyleSheet(self.styles[self.flag])
     self.count += 1
     if self.count < 10:
         return  # label flash 10 times
     self.count = 0
     if self.que.qsize() != 0:
         return  # avoid duplication
     try:
         with pyodbc.connect(strConn).cursor() as c:
             c.execute("Select id,requestdoc,clientinfo from [docserver] where servermsg='PENDING'")
             if c.rowcount != 0:
                 for idx, reqdoc, client in c:
                     self.que.put((idx, reqdoc))  # put into queue
                     self.list1.insertItem(0, time.strftime("%Y-%m-%d %H:%M:%S") + " [" + client + "]:" + reqdoc)
                 # process requests
                 # one worker per 20 queued requests, rounded up
                 workers = int((self.que.qsize() - 1) / 20) + 1
                 for x in xrange(workers):
                     Worker(x, self.que, self.list2)
         self.timer.setInterval(1000 * 1)
     except Exception as e:
         # DB failure: switch labels to the error style, slow polling to 6s
         self.label1.setStyleSheet(self.styles[2])
         self.label2.setStyleSheet(self.styles[2])
         self.timer.setInterval(1000 * 6)
         self.list1.insertItem(0, "%s error:%s" % (time.strftime("%Y-%m-%d %H:%M:%S"), e))
Example #6
1
def turn_logging_on(verbose=1):
    """Start IPython session logging into hyperspy_log.py in the cwd.

    Silently does nothing outside IPython; prints a notice (when
    ``verbose == 1``) and returns early if the IPython version is too old
    or logging is already active.  A brand-new log file gets a dated
    header block.
    """
    ip = get_ipython()
    if ip is None:
        return
    import IPython

    if LooseVersion(IPython.__version__) < LooseVersion("0.11"):
        if verbose == 1:
            print("Logging is not supported by this version of IPython")
        return
    if ip.logger.log_active is True:
        if verbose == 1:
            print("Already logging to " + ip.logger.logfname)
        return

    filename = os.path.join(os.getcwd(), "hyperspy_log.py")
    is_new_file = not os.path.exists(filename)
    ip.logger.logstart(logfname=filename, logmode="append")
    if is_new_file:
        # stamp a header only the first time the file is created
        header = (
            "#!/usr/bin/env python \n"
            "# ============================\n"
            "# %s \n" % strftime("%Y-%m-%d")
            + "# %s \n" % strftime("%H:%M")
            + "# ============================\n"
        )
        ip.logger.log_write(header)
    if verbose == 1:
        print("\nLogging is active")
        print("The log is stored in the hyperspy_log.py file" " in the current directory")
def main(argv):
    """CLI entry point (Python 2): parse -i/-r options and run make_file,
    printing start and end timestamps.

    NOTE(review): a missing option only prints an error and falls through,
    so the later use of the unset variable raises UnboundLocalError
    instead of exiting with the message.
    """
    optparse_usage = "find_NADHreaction_comp.py -i <NADHreaction_id> -r <root_dir>"
    parser = OptionParser(usage=optparse_usage)
    parser.add_option(
        "-i",
        "--inputNADHreaction",
        action="store",
        type="string",
        dest="input_NADH",
        help="The input text file of reaction id list, which are related to NADH",
    )
    parser.add_option(
        "-r",
        "--rootDir",
        action="store",
        type="string",
        dest="root_dir",
        help="The root directory. All files are generated here.",
    )

    (options, args) = parser.parse_args()
    if options.input_NADH:
        input_NADH = os.path.abspath(options.input_NADH)
    else:
        print "ERROR: please provide proper input file name"
    if options.root_dir:
        root_dir = os.path.abspath(options.root_dir)
    else:
        print "ERROR: please provide proper root direcotory"
    # Run Functions
    print "START time:\t%s" % (strftime("%Y-%m-%d %H:%M:%S"))
    make_file(input_NADH, root_dir)
    print "END time:\t%s" % (strftime("%Y-%m-%d %H:%M:%S"))
Example #8
0
def backup():
    """Zip the GUI-selected source directory into
    <target>/<YYYYMMDD>/<HHMMSS>.zip (Python 2, Tkinter entry globals).

    NOTE(review): the zip command is built by string concatenation and run
    through os.system, so paths containing spaces or shell metacharacters
    break; subprocess.run with an argument list would be safer.
    """
    global entry_source
    global entry_target
    source = entry_source.get()
    target_dir = entry_target.get()
    # source=['/home/shiyanlou/Code/']
    # target_dir='/home/shiyanlou/Desktop/'

    # one folder per day, one archive per second within it
    today_dir = target_dir + time.strftime("%Y%m%d")
    time_dir = time.strftime("%H%M%S")

    touch = today_dir + os.sep + time_dir + ".zip"
    # target=target_dir+time.strftime('%Y%m%d%H%M%S')+'.zip'
    # zip_command="zip -qr %s %s" %(target,' '.join(source))
    # command_touch="zip -qr "+touch+' '+' '.join(source)
    command_touch = "zip -qr " + touch + " " + source
    print command_touch
    print source
    print target_dir
    if os.path.exists(today_dir) == 0:
        os.mkdir(today_dir)
    if os.system(command_touch) == 0:
        # if os.system(zip_command)==0:
        print "Successful backup"
    else:
        print "Backup Failed"
Example #9
0
 def png(self, start_timestamp, end_timestamp):
     """Render the loaded series for the given time window as a PNG chart
     and return the image bytes (Python 2: StringIO, locale.format).
     """
     self.load(start_timestamp, end_timestamp)
     plt.figure(figsize=(10, 7.52))
     # figure-wide style tweaks
     plt.rc("axes", labelsize=12, titlesize=14)
     plt.rc("font", size=10)
     plt.rc("legend", fontsize=7)
     plt.rc("xtick", labelsize=8)
     plt.rc("ytick", labelsize=8)
     # leave room on the right for the legend placed outside the axes
     plt.axes([0.08, 0.08, 1 - 0.27, 1 - 0.15])
     for plot in self.plots:
         plt.plot(self.timestamps, self.plots[plot], self.series_fmt(plot), label=self.series_label(plot))
     plt.axis("tight")
     # x axis: local-time "HH:MM / Mon DD" labels; y axis: locale-grouped integers
     plt.gca().xaxis.set_major_formatter(
         matplotlib.ticker.FuncFormatter(lambda x, pos=None: time.strftime("%H:%M\n%b %d", time.localtime(x)))
     )
     plt.gca().yaxis.set_major_formatter(
         matplotlib.ticker.FuncFormatter(lambda x, pos=None: locale.format("%.*f", (0, x), True))
     )
     plt.grid(True)
     plt.legend(loc=(1.003, 0))
     plt.xlabel("Time/Date")
     plt.title(
         self.description()
         + "\n%s to %s"
         % (
             time.strftime("%H:%M %d-%b-%Y", time.localtime(start_timestamp)),
             time.strftime("%H:%M %d-%b-%Y", time.localtime(end_timestamp)),
         )
     )
     output_buffer = StringIO.StringIO()
     plt.savefig(output_buffer, format="png")
     return output_buffer.getvalue()
Example #10
0
 def newFunc(*args, **args2):
     """Timing wrapper (Python 2): print start/end stamps and the elapsed
     seconds around a call to ``func``.

     NOTE(review): ``func`` is presumably the function being decorated in
     the enclosing scope — not visible in this excerpt.
     """
     t0 = time.time()
     print "@%s, {%s} start" % (time.strftime("%X", time.localtime()), func.__name__)
     back = func(*args, **args2)
     print "@%s, {%s} end" % (time.strftime("%X", time.localtime()), func.__name__)
     print "@%.3fs taken for {%s}" % (time.time() - t0, func.__name__)
     return back
def logout(environ, start_response):
    """
    Expire the tiddlyweb_user cookie when a POST is received.
    """
    redirect = environ["tiddlyweb.query"].get("tiddlyweb_redirect", [None])[0]
    if redirect:
        uri = server_base_url(environ) + redirect.encode("UTF-8")
    else:
        fallback = server_base_url(environ) + environ["tiddlyweb.config"].get("logout_uri", "/")
        uri = environ.get("HTTP_REFERER", fallback)

    prefix = environ.get("tiddlyweb.config", {}).get("server_prefix", "")
    cookie = Cookie.SimpleCookie()
    cookie["tiddlyweb_user"] = ""
    cookie["tiddlyweb_user"]["path"] = "%s/" % prefix

    # MSIE ignores max-age, so hand it an explicit past Expires date instead
    if "MSIE" in environ.get("HTTP_USER_AGENT", ""):
        cookie["tiddlyweb_user"]["expires"] = time.strftime(
            "%a, %d-%m-%y %H:%M:%S GMT", time.gmtime(time.time() - 600000)
        )
    else:
        cookie["tiddlyweb_user"]["max-age"] = "0"

    response_headers = [
        ("Set-Cookie", cookie.output(header="")),
        ("Expires", time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(time.time() - 600000))),
        ("Cache-Control", "no-store"),
        ("Location", uri),
    ]
    start_response("303 See Other", response_headers)

    return [uri]
def yql_query(symbol, start=time.strftime("%Y-%m-%d"), end=time.strftime("%Y-%m-%d")):
    """
    Executing yql request (Rest call)
    :param symbol: The index symbol
    :param start: start date
    :param end: end date
    :return: JSON data from yahoo server.

    NOTE(review): the strftime defaults are evaluated once at import time,
    so a long-running process keeps the date of the day it was started.
    Python 2 code (urllib.urlencode / urllib2 / print statements).
    """
    baseurl = "https://query.yahooapis.com/v1/public/yql?"
    yql_query = (
        'SELECT * FROM yahoo.finance.historicaldata WHERE symbol="'
        + symbol
        + '" and startDate="'
        + start
        + '" and endDate="'
        + end
        + '"'
    )
    yql_url = baseurl + urllib.urlencode({"q": yql_query}) + "&format=json&env=http://datatables.org/alltables.env"
    print yql_url
    result = urllib2.urlopen(yql_url).read()
    data = json.loads(result)
    print data["query"]["results"]
    # "results" is JSON null when the query matched nothing
    if data["query"]["results"] is None:
        return None
    else:
        return data["query"]["results"]["quote"]
Example #13
0
    def test_log_entry_collection_handler(self):
        """Entries written for a log source come back (newest first) from
        LogEntryCollectionHandler, for both master and node log sources."""
        node_ip = "10.20.0.130"
        log_entries = [
            [time.strftime(settings.UI_LOG_DATE_FORMAT), "LEVEL111", "text1"],
            [time.strftime(settings.UI_LOG_DATE_FORMAT), "LEVEL222", "text2"],
        ]
        self.env.create_cluster()
        cluster = self.env.clusters[0]
        node = self.env.create_node(cluster_id=cluster.id, ip=node_ip)
        self._create_logfile_for_node(settings.LOGS[0], log_entries)
        self._create_logfile_for_node(settings.LOGS[1], log_entries, node)

        # same request/assert cycle for the master log and the node log
        for query in (
            {"source": settings.LOGS[0]["id"]},
            {"node": node.id, "source": settings.LOGS[1]["id"]},
        ):
            resp = self.app.get(
                reverse("LogEntryCollectionHandler"),
                params=query,
                headers=self.default_headers,
            )
            self.assertEqual(200, resp.status_code)
            body = resp.json_body
            # handler returns newest-first; reverse to match our fixture order
            body["entries"].reverse()
            self.assertEqual(body["entries"], log_entries)
Example #14
0
def export(data):
    """Append one CSV row describing the commander's current state to the log.

    Columns: date, time, system name, docked starport (or ""), mapped ship
    name, and a comma-joined cargo manifest with drones excluded.  Relies
    on the module globals ``config``, ``openlog``, ``logfile``, ``ship_map``
    and ``commodity_map``.

    The nested helper ``elapsed`` from the original was never called and
    has been removed.
    """
    # prefer the timestamp recorded at query time; fall back to "now"
    querytime = config.getint("querytime") or int(time.time())

    openlog()

    # "<qty> <commodity>" per cargo item, skipping drones
    cargo = ",".join(
        [
            ("%d %s" % (x["qty"], commodity_map.get(x["commodity"], x["commodity"])))
            for x in data["ship"]["cargo"]["items"]
            if x["commodity"] != "drones"
        ]
    )

    logfile.write(
        "%s,%s,%s,%s,%s,%s\r\n"
        % (
            time.strftime("%Y-%m-%d", time.localtime(querytime)),
            time.strftime("%H:%M:%S", time.localtime(querytime)),
            data["lastSystem"]["name"],
            data["commander"]["docked"] and data["lastStarport"]["name"] or "",
            ship_map.get(data["ship"]["name"], data["ship"]["name"]),
            cargo,
        )
    )
    logfile.flush()
Example #15
0
 def _get_leave_status(self, cr, uid, ids, name, args, context=None):
     holidays_obj = self.pool.get("hr.holidays")
     holidays_id = holidays_obj.search(
         cr,
         uid,
         [
             ("employee_id", "in", ids),
             ("date_from", "<=", time.strftime("%Y-%m-%d %H:%M:%S")),
             ("date_to", ">=", time.strftime("%Y-%m-%d 23:59:59")),
             ("type", "=", "remove"),
             ("state", "not in", ("cancel", "refuse")),
         ],
         context=context,
     )
     result = {}
     for id in ids:
         result[id] = {
             "current_leave_state": False,
             "current_leave_id": False,
             "leave_date_from": False,
             "leave_date_to": False,
         }
     for holiday in self.pool.get("hr.holidays").browse(cr, uid, holidays_id, context=context):
         result[holiday.employee_id.id]["leave_date_from"] = holiday.date_from
         result[holiday.employee_id.id]["leave_date_to"] = holiday.date_to
         result[holiday.employee_id.id]["current_leave_state"] = holiday.state
         result[holiday.employee_id.id]["current_leave_id"] = holiday.holiday_status_id.id
     return result
Example #16
0
def get_filterdate(filterDate, dateTime):
    """
    Get filterdate.

    Returns "true" when *dateTime* (epoch seconds) satisfies the named
    filter ("today", "thismonth", "thisyear", "past7days", or "" meaning
    no filter), otherwise the empty string.
    """
    file_year = strftime("%Y", gmtime(dateTime))
    file_month = strftime("%m", gmtime(dateTime))
    file_day = strftime("%d", gmtime(dateTime))
    now = localtime()
    if filterDate == "today":
        same_day = (
            int(file_year) == int(now[0])
            and int(file_month) == int(now[1])
            and int(file_day) == int(now[2])
        )
        return "true" if same_day else ""
    if filterDate == "thismonth":
        # "this month" approximated as the last 30 days
        return "true" if dateTime >= time() - 2592000 else ""
    if filterDate == "thisyear":
        return "true" if int(file_year) == int(now[0]) else ""
    if filterDate == "past7days":
        return "true" if dateTime >= time() - 604800 else ""
    if filterDate == "":
        return "true"
    return ""
    def __reset_acumulado(self):
        """Reset every accumulated-traffic counter, refresh the consumption
        window labels and restore the persisted max speed settings."""
        self.conf.reset_acumulated()
        self.initial_acumulated_sent = 0
        self.initial_acumulated_received = 0
        self.initial_acumulated_traffic = 0
        self.initial_acumulated_time = 0
        self.max_upload_speed = 0
        self.max_download_speed = 0

        # zero both the partial (False) and total (True) window counters
        for total in (False, True):
            self.consum_window.set_acumulated_time(0, total)
            self.consum_window.set_acumulated_received(0, total)
            self.consum_window.set_acumulated_traffic(0, total)
            self.consum_window.set_acumulated_sent(0, total)

        last_resumen = self.conf.get_last_resumen()
        self.consum_window.consum_resumen_label.set_markup(
            _(u"<b>Resumen : desde %s</b>" % time.strftime("%d/%m/%y - %H:%M", last_resumen))
        )
        self.consum_window.consum_resumen_label2.set_markup(
            _(u"<b>Resumen : desde %s</b>" % time.strftime("%d/%m/%y - %H:%M", last_resumen))
        )

        # persist and re-apply the remembered max speeds
        self.conf.set_max_upload_speed(self.last_max_output_speed)
        self.conf.set_max_download_speed(self.last_max_input_speed)
        self.conf.save_conf()
        self.consum_window.set_max_upload_speed(self.last_max_output_speed)
        self.consum_window.set_max_download_speed(self.last_max_input_speed)

        self.notified_warning_mb = None
        self.notified_warning_percent = None

        self.update_color_signals()
Example #18
0
def detect_language(this):
    """Pick the UI language for this request and return it.

    Priority: explicit ?lang= parameter, then the "lang" cookie, then the
    host's configured default.  Updates the LANG / HOST / HOST_SCHEME
    module globals and (re)sets a half-year "lang" cookie when the choice
    came from the request or cookie.
    """
    global LANG, HOST, HOST_SCHEME

    headers = this.this_request.headers
    # remember the requesting host (and scheme) for later URL building
    if "Host" in headers:
        HOST = headers["Host"]
    if "Scheme" in headers:
        HOST_SCHEME = headers["Scheme"]

    requested = this.this_request.get("lang")
    if requested and requested in [LANG_EN, LANG_RU]:
        # explicit request parameter wins
        LANG = requested
        expires = datetime.datetime.now() + datetime.timedelta(days=180)
        utils.Cookie.set("lang", LANG, time.strftime("%a, %d-%b-%Y %H:%M:%S GMT", expires.timetuple()))
    elif utils.Cookie.get("lang"):
        cookie_lang = utils.Cookie.get("lang")
        LANG = cookie_lang if cookie_lang in LANGS else LANG_EN
        expires = datetime.datetime.now() + datetime.timedelta(days=180)
        utils.Cookie.set("lang", LANG, time.strftime("%a, %d-%b-%Y %H:%M:%S GMT", expires.timetuple()))
    else:
        LANG = CONF[HOST]["default_lang"]

    return LANG
Example #19
0
    def create_data_dir(self, datadir, name=None, ts=None, datesubdir=True, timesubdir=True):
        """
        Build and return the path of a new data directory.

        Input:
            datadir (string): base directory
            name (string): optional name of measurement
            ts (time.localtime()): timestamp which will be used if timesubdir=True
            datesubdir (bool): whether to create a subdirectory for the date
            timesubdir (bool): whether to create a subdirectory for the time

        Output:
            The directory to place the new file in
        """

        if ts is None:
            ts = time.localtime()

        path = datadir
        if datesubdir:
            # YYYYMMDD date component
            path = os.path.join(path, time.strftime("%Y%m%d", ts))
        if timesubdir:
            # HHMMSS component, optionally suffixed with the measurement name
            time_component = time.strftime("%H%M%S", ts)
            if name is not None:
                time_component = time_component + "_" + name
            path = os.path.join(path, time_component)

        return path
Example #20
0
def remind(jenni, input):
    """Parse a remind command, store the reminder keyed by its due epoch
    second in jenni.rdb, persist the database and acknowledge."""
    match = r_command.match(input.bytes)
    if not match:
        return jenni.reply("Sorry, didn't understand the input.")
    length, scale, message = match.groups()

    # length * unit factor, rounded up to whole seconds
    seconds = float(length) * scaling.get(scale, 60)
    duration = int(seconds) + 1 if seconds % 1 else int(seconds)

    t = int(time.time()) + duration
    reminder = (input.sender, input.nick, message)
    if t in jenni.rdb:
        jenni.rdb[t].append(reminder)
    else:
        jenni.rdb[t] = [reminder]

    dump_database(jenni.rfn, jenni.rdb)

    if duration < 60:
        jenni.reply("Okay, will remind in %s secs" % duration)
    else:
        when = ""
        if duration >= 3600 * 12:
            # include the date for reminders more than half a day away
            when += time.strftime(" on %d %b %Y", time.gmtime(t))
        when += time.strftime(" at %H:%MZ", time.gmtime(t))
        jenni.reply("Okay, will remind%s" % when)
Example #21
0
def validate_month(m):
    """Return True when *m* (an abbreviated month name, e.g. "Jan") names
    the current month.

    Both sides are normalised through strptime/strftime("%m"), so the
    comparison is by month number.  Raises ValueError for an invalid
    abbreviation.
    """
    given = strptime(m, "%b")
    current = strptime(ctime().split()[1], "%b")
    # return the comparison directly instead of an if/else returning literals
    return strftime("%m", given) == strftime("%m", current)
Example #22
0
 def lastlogExit(self):
     """Append a lastlog-style line for the finished honeypot session
     (Python 2: uses the `file` builtin).
     """
     # session start in full lastlog form, end as bare HH:MM
     starttime = time.strftime("%a %b %d %H:%M", time.localtime(self.logintime))
     endtime = time.strftime("%H:%M", time.localtime(time.time()))
     duration = utils.durationHuman(time.time() - self.logintime)
     f = file("%s/lastlog.txt" % self.env.cfg.get("honeypot", "data_path"), "a")
     f.write("root\tpts/0\t%s\t%s - %s (%s)\n" % (self.clientIP, starttime, endtime, duration))
     f.close()
Example #23
0
def add_new_message(id, user, user_profile, text):
    """
    Add new message
    :param id: ID conference
    :type id: basestring
    :type user: basestring
    :type user_profile: basestring
    :type text: basestring

    Appends the timestamped message to every participant's mailbox in the
    memcached "conferences" structure, then returns the sender's pending
    messages.  Any failure is reported back as a JSON error response.
    """
    try:
        if not verification_user(id, user):
            return get_new_message_for_user(user)
        if checking_conference(id):
            conferences = get_memcached(get_key("conferences"))
            # fan the message out to every participant of the conference
            for key in conferences[id]["users"].keys():
                conferences[id]["users"][key]["messages"].append(
                    dict(
                        user=user, text=text, time=strftime("%H:%M:%S"), date=strftime("%Y-%m-%d"), profile=user_profile
                    )
                )
            set_memcached(get_key("conferences"), conferences)
    except Exception:
        # was a bare ``except:``; narrowed so KeyboardInterrupt/SystemExit propagate
        data = json.dumps({"cmd": "Error", "data": {"msg": str(sys.exc_info())}})
        return HttpResponse(data, mimetype="application/json", status=200)

    return get_new_message_for_user(user)
Example #24
0
 def _http_request(self, cmd, path, auth, send):  # noqa
     """Issue one HTTP request on the persistent connection and return the
     response body (Python 2: `print >>` and base64.encodestring).

     Returns None for a 410 Gone response; raises ApiError for any other
     non-200 status.
     """
     if self._debug:
         # truncate long paths in debug output
         path2 = path
         if len(path2) > 50:
             path2 = path2[:50] + "[...]"
         error_msg = "%s %s %s" % (time.strftime("%Y-%m-%d %H:%M:%S"), cmd, path2)
         print >> sys.stderr, error_msg
     self._conn.putrequest(cmd, path)
     self._conn.putheader("User-Agent", self._created_by)
     if auth:
         # HTTP basic auth from the stored credentials
         base64_user_pass = base64.encodestring(self._username + ":" + self._password).strip()
         self._conn.putheader("Authorization", "Basic " + base64_user_pass)
     if send is not None:
         self._conn.putheader("Content-Length", len(send))
     self._conn.endheaders()
     if send:
         self._conn.send(send)
     response = self._conn.getresponse()
     if response.status != 200:
         payload = response.read().strip()
         if response.status == 410:
             # 410 Gone is treated as "no data" rather than an error
             return None
         raise ApiError(response.status, response.reason, payload)
     if self._debug:
         error_msg = "%s %s %s" % (time.strftime("%Y-%m-%d %H:%M:%S"), cmd, path2)
         print >> sys.stderr, error_msg
     return response.read()
Example #25
0
    def getLastUpdated(self):
        """Return the last-update string annotated with its GMT offset.

        @note: there seems to be a problem with AM/PM not parsing correctly

        The AM/PM re-parsing code that followed the return chain below was
        unreachable (every branch returns first) and self-described as "a
        silly idea", so it has been removed.
        """
        logger.log(9, 'getLastUpdated() "%s"', self.updated)
        if self.zone < 0:
            # negative offsets already carry their own "-" sign
            return "%s  (GMT%s)" % (self.updated, self.zone)
        elif self.zone > 0:
            return "%s  (GMT+%s)" % (self.updated, self.zone)
        else:
            return "%s  (GMT)" % (self.updated)
Example #26
0
def generate_log(log, what2log):
    """Append a timestamped, user-attributed entry to *log*, creating the
    file on first use.

    The two original branches differed only in the open mode, so they are
    collapsed into one.  NOTE(review): the file is opened in binary mode
    but written with str — this matches the original (Python 2) behaviour
    and would need text modes ("w"/"a") under Python 3.
    """
    mode = "ab" if os.path.isfile(log) else "wb"
    with open(log, mode) as fo:
        fo.write(time.strftime("%Y-%m-%dT%H:%M:%S ") + getpass.getuser() + " " + what2log + " \n")
Example #27
0
def uploadfiles(request):
    """Django view: run an uploadmedia job for POSTed files and render the
    results page with job metadata and timing information."""
    started = time.time()
    status = "up"
    constants = setConstants(request, im)
    constants["jobnumber"] = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())

    if request.POST:
        jobinfo, images = prepareFiles(request, im.validateonly, im.BMUoptions, constants)
    else:
        jobinfo, images = {}, []

    timestamp = time.strftime("%b %d %Y %H:%M:%S", time.localtime())
    elapsedtime = time.time() - started
    logger.info("%s :: %s :: %s" % ("uploadmedia job ", constants["jobnumber"], "-"))

    template_context = {
        "apptitle": TITLE,
        "serverinfo": SERVERINFO,
        "images": images,
        "count": len(images),
        "constants": constants,
        "jobinfo": jobinfo,
        "validateonly": im.validateonly,
        "version": prmz.VERSION,
        "dropdowns": im.BMUoptions,
        "override_options": override_options,
        "status": status,
        "timestamp": timestamp,
        "elapsedtime": "%8.2f" % elapsedtime,
    }
    return render(request, "uploadmedia.html", template_context)
Example #28
0
    def setup_data_vault(self):
        """Open (or create) the data-vault directory and a fresh dataset for
        this spectrum run, falling back to a timestamped folder under
        Experiments/<name> when no save location is configured."""
        now = time.localtime()
        try:
            # explicit save location configured for this experiment?
            directory = self.parameters.get("Spectrum.save_directory")
            datasetNameAppend = self.parameters.get("Spectrum.dataset_name_append")
        except KeyError:
            # no configuration: date/time-stamped folder under Experiments/<name>
            directory = [
                "",
                "Experiments",
                self.name,
                time.strftime("%Y%b%d", now),
                time.strftime("%H%M_%S", now),
            ]
            datasetNameAppend = time.strftime("%Y%b%d_%H%M_%S", now)

        self.datasetName = "Spectrum {}".format(datasetNameAppend)
        self.dv.cd(directory, True, context=self.spectrum_save_context)
        self.dv.new(
            self.datasetName,
            [("Freq", "MHz")],
            [("Excitation Probability", "Arb", "Arb")],
            context=self.spectrum_save_context,
        )
        window_name = self.parameters.get("Spectrum.window_name", ["Spectrum"])
        self.dv.add_parameter("Window", window_name, context=self.spectrum_save_context)
        self.dv.add_parameter("plotLive", True, context=self.spectrum_save_context)
        self.directory = directory
Example #29
0
    def delete(self, thema, id, beitragID=None):
        """Toggle the deleted status of a discussion (no beitragID) or of a
        single post within it, stamping the editing user and time, then
        write the discussion back to its JSON file."""
        discussionpath = "./data/themen/" + thema + "/" + id + ".json"
        with open(discussionpath, "r") as discussionfile:
            discussion = json.load(discussionfile)

        def toggle_status(entry):
            # flip between "deleted" and the " " (active) placeholder status
            entry["Status"] = " " if entry["Status"] == "deleted" else "deleted"
            entry["Bearbeiter"] = cherrypy.session["Benutzername"]
            entry["Bearbeitet"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

        if beitragID is None:
            toggle_status(discussion)
        else:
            for post in discussion["Beitraege"]:
                if post["ID"] == beitragID:
                    toggle_status(post)

        with open(discussionpath, "w") as discussionfile:
            json.dump(discussion, discussionfile, indent=4)
Example #30
0
def set_status(text, old_text=[""]):
    """Print and persist *text* only when it changed since the last call
    (Python 2: print statements and the `file` builtin).

    The mutable default ``old_text`` is deliberately used as function-static
    storage for the previously written value.
    """
    if text != old_text[0]:
        old_text[0] = text
        print time.strftime("[%F %T]"), text
        f = file("/home/josh/code/evolve/STATUS", "w")
        print >> f, text
        f.close()