Example #1
    def write_progress(self, rate=None, prestart=None, wait=None, complete=False, error=None):
        # pad the columns to their fixed widths
        action = self.action.rjust(self.actionwidth)

        if error:
            rate = error
        elif prestart:
            rate = "starting"
        elif wait:
            rate = ("%s" % self.retry_wait) + "s..."
        elif complete:
            rate = "done"
        else:
            rate = "%s/s" % self.format_size(rate)
        rate = rate.ljust(self.ratewidth)

        url = self.url_fmt

        if self.totalsize:
            size = self.format_size(self.totalsize)
        elif self.download_size:
            size = self.format_size(self.download_size)
        else:
            size = "????? B"
        size = ("  %s" % size).ljust(self.sizewidth)

        # add formatting
        if error:
            rate = ansicolor.red(rate)
        elif prestart or wait:
            rate = ansicolor.cyan(rate)
        elif complete:
            rate = ansicolor.green(rate)
        else:
            rate = ansicolor.yellow(rate)

        # draw progress bar
        if not (error or prestart or complete) and self.totalsize:
            c = int(self.urlwidth * self.download_size / self.totalsize)
            url = ansicolor.wrap_string(self.url_fmt, c, None, reverse=True)

        if not self.totalsize:
            size = ansicolor.yellow(size)

        line = "%s ::  %s  " % (action, rate)

        term = (os.environ.get("DEBUG_FETCH") and "\n") or "\r"
        if error or complete:
            term = "\n"
        ioutils.write_err("%s%s%s%s" % (line, url, size, term))

        # log download
        if error:
            self.log_url(error, error=True)
        elif complete:
            self.log_url("done")
Example #2
def checkBoardOutline(self, filepath):
    filename = os.path.basename(filepath)
    #Basic gerber checks
    checkGerberFile(self, filepath)
    #Compute board outline
    millLines = readFileLines(filepath)
    # Find factors to get absolute coordinates:
    x_factor, y_factor = findCoordinateFormat(millLines)
    # Initialize coordinates in case the first move only sets one axis
    x, y = 0, 0
    #We can only interpret the file if coordinates are absolute
    if not "G90*" in millLines:
        print (yellow("Mill coordinates in %s don't seem to be absolute (G90 missing!)" % filename))
        return
    #Determine coordinate units
    unit = parseGerberUnit(millLines)
    if unit is None: #Neither inch nor mm found
        print (yellow("Could not find coordinate units (mm/in) in %s" % filename))
        return
    #Parse the aperture list
    apertures = parseGerberApertures(millLines)
    selectApertureRegex = re.compile(r"(D\d+)\*")
    move2DRegex = re.compile(r"X(\d+)Y(\d+)D(\d)\*") #Move (D2) or draw (D1)
    move1DRegex = re.compile(r"([XY])(\d+)D(\d)\*") #With only one coordinate
    #Try to interpret gerber file
    minCoords = (sys.maxsize, sys.maxsize)
    maxCoords = (0, 0)
    lastCoords = (0, 0)
    currentAperture = None
    for line in millLines:
        if selectApertureRegex.match(line):
            apertureCode = selectApertureRegex.match(line).group(1)
            currentAperture = findAperture(apertures, apertureCode)
        elif move2DRegex.match(line):
            match = move2DRegex.match(line)
            x = int(match.group(1)) / x_factor
            y = int(match.group(2)) / y_factor
        elif move1DRegex.match(line):
            match = move1DRegex.match(line)
            if match.group(1) == "X":
                x = int(match.group(2)) / x_factor
                y = lastCoords[1]
            elif match.group(1) == "Y":
                x = lastCoords[0]
                y = int(match.group(2)) / y_factor
            else: raise Exception("Internal error: Invalid coordinate type in 1D move: %s" % match.group(1))
        else: continue
        #Compute min/max coordinates
        lastCoords = (x, y)
        minCoords = (min(minCoords[0], lastCoords[0]), min(minCoords[1], lastCoords[1]))
        maxCoords = (max(maxCoords[0], lastCoords[0]), max(maxCoords[1], lastCoords[1]))
    #Compute board size (minimum enclosing rectangle)
    boardSize = (maxCoords[0] - minCoords[0], maxCoords[1] - minCoords[1])
    #Print info
    print (black("\tGerber offset: ({1:.2f} {0}, {2:.2f} {0})".format(unit, minCoords[0], minCoords[1])))
    print (black("\tBoard size (minimum rectangle): %.1f %s x %.1f %s" % \
            (boardSize[0], unit, boardSize[1], unit)))
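checkBoardOutline depends on findCoordinateFormat() to turn raw Gerber integers into real units, but that helper is not shown. A hedged sketch of what it presumably derives from the standard %FSLAX..Y..*% format statement (the decimal digit counts give the divisors); the project's actual helper may differ:

import re

def findCoordinateFormatSketch(lines):
    # %FSLAX34Y34*% declares 3 integer + 4 decimal digits for X and Y,
    # so raw coordinates are divided by 10**4 to obtain mm or inches.
    fsRegex = re.compile(r"%FSLAX(\d)(\d)Y(\d)(\d)\*%")
    for line in lines:
        match = fsRegex.match(line)
        if match:
            return 10 ** int(match.group(2)), 10 ** int(match.group(4))
    return 1, 1  # no format statement found; leave coordinates unscaled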
Example #3
def inform(msg, minor=False, major=False):
    if major:
        ansicolor.write_out(ansicolor.yellow('>>> %s\n' % msg))
    elif minor:
        ansicolor.write_out(ansicolor.cyan('-> %s\n' % msg))
    else:
        ansicolor.write_out(ansicolor.green('> %s\n' % msg))
Example #4
def handle_answer(s, p):
    fromname = p.fromname
    try:
        flag = ms_flag_values[p[SAPMS].flag]
    except:
        flag = "0"
    try:
        opcode = str(ms_opcode_values[p[SAPMS].opcode])
    except:
        opcode = str(p[SAPMS].opcode)
    try:
        opcode_err = str(ms_opcode_error_values[p[SAPMS].opcode_error])
    except:
        opcode_err = 'None'

    if opcode_err == 'MSOP_OK':
        opcode_err = green(opcode_err)
    else:
        opcode_err = red(opcode_err, bold=True)

    if p.key != null_key:
        key = " key: " + yellow('NOT NULL', bold=True)
        logger.error("[!] Out of order packets, reload this script.")
        #s.close()
        #exit(0)
    else:
        key = ""

    logger.info("flag: " + cyan(flag) + " opcode:" + cyan(opcode) + \
        " opcode_error: " + green(opcode_err) + key)
Example #5
def inform(msg, minor=False, major=False):
    if major:
        ansicolor.write_out(ansicolor.yellow('>>> %s\n' % msg))
    elif minor:
        ansicolor.write_out(ansicolor.cyan('-> %s\n' % msg))
    else:
        ansicolor.write_out(ansicolor.green('> %s\n' % msg))
Example #6
def handle_answer(s, p):
    fromname = p.fromname
    try:
        flag = ms_flag_values[p[SAPMS].flag]
    except:
        flag = "0"
    try:
        opcode = str(ms_opcode_values[p[SAPMS].opcode])
    except:
        opcode = str(p[SAPMS].opcode)
    try:
        opcode_err = str(ms_opcode_error_values[p[SAPMS].opcode_error])
    except:
        opcode_err = 'None'

    if opcode_err == 'MSOP_OK':
        opcode_err = green(opcode_err)
    else:
        opcode_err = red(opcode_err, bold=True)

    if p.key != null_key:
        p.show()
        key = " key: " + yellow('NOT NULL', bold=True)
        print "[!] Out of order packets, reload this script."
        #s.close()
        #exit(0)
    else:
        key = ""

    print "flag: " + cyan(flag) + " opcode:" + cyan(opcode) + \
        " opcode_error: " + green(opcode_err) + key

    # "idenfify request from the server?
    if key != "" and flag == 'MS_REQUEST' and opcode == '0':
        s.send(ms_adm_nilist(p, 1))
Example #7
 def restore(cls, url):
     hostname = urlrewrite.get_hostname(url)
     filename = urlrewrite.hostname_to_filename(hostname)
     q, wb = None, None
     if (ioutils.file_exists(filename + ".web", dir=ioutils.LOGDIR)):
         ioutils.write_err("Restoring web from %s ..." %
                         ansicolor.yellow(filename + ".web"))
         wb = ioutils.deserialize(filename + ".web", dir=ioutils.LOGDIR)
         ioutils.write_err(ansicolor.green("done\n"))
     if (ioutils.file_exists(filename + ".session", dir=ioutils.LOGDIR)):
         ioutils.write_err("Restoring session from %s ..." %
                         ansicolor.yellow(filename + ".session"))
         q = ioutils.deserialize(filename + ".session", dir=ioutils.LOGDIR)
         q = recipe.overrule_records(q)
         ioutils.write_err(ansicolor.green("done\n"))
     return cls(wb=wb, queue=q)
Example #8
def checkGerberFile(self, filepath):
    """
    Check if the given file is a RS-274X gerber file
    - Checks for a G04 command at the beginning of the file
    - Checks for a %LN command and verifies it against the filename
    - Checks for a G04 #@! TF.FileFunction command
    """
    filename = os.path.basename(filepath)
    lines = readFileLines(filepath)
    #Find G04 line (i.e. what software created the file)
    if not any(map(lambda l: l.startswith("G04 "), lines)):
        print (red("Couldn't find G04 command (software description) in %s. Probably not a Gerber file." % filename, bold=True))
    #Find %LN line, i.e. what the creating
    # software thinks the current layer is (e.g. "BottomMask")
    layerNoteRegex = re.compile(r"^\%LN([^\*]+)\*%$")
    fileFunctionRegex = re.compile(r"G04 #@! TF\.FileFunction,([^\*]+)\*")
    layerDescription = None
    for line in lines:
        if layerNoteRegex.match(line):
            layerDescription = layerNoteRegex.match(line).group(1)
            break #Expecting only one layer note
        elif fileFunctionRegex.match(line):
            layerDescription = fileFunctionRegex.match(line).group(1)
            layerDescription = layerDescription.split(",")
    #Check if the layer note we found makes sense
    if layerDescription is None: #No %LN line found
        print (yellow("Couldn't find %%LN command or file function command in %s" % filename))
    else: #We found a layer description. Check for sanity
        if isinstance(layerDescription, list): # FileFunction command
            if layerDescription not in allowedLayerNotes[self.name]:
                print (red("Layer description '%s' in %s does not match any of the expected descriptions: %s" % (layerDescription, filename, allowedLayerNotes[self.name]), bold=True))

        else: # %LN command
            if layerDescription not in allowedLayerNotes[self.name]:
                print (red("Layer description '%s' in %s does not match any of the expected descriptions: %s" % (layerDescription, filename, allowedLayerNotes[self.name]), bold=True))
Example #9
def checkGerberFile(self, filepath):
    """
    Check if the given file is a RS-274X gerber file
    - Checks for a G04 command at the beginning of the file
    - Checks for a %LN command and verifies it against the filename
    - Checks for a G04 #@! TF.FileFunction command
    """
    filename = os.path.basename(filepath)
    lines = readFileLines(filepath)
    #Find G04 line (i.e. what software created the file)
    if not any(map(lambda l: l.startswith("G04 "), lines)):
        print(red("Couldn't find G04 command (software description) in %s. Probably not a Gerber file." % filename, bold=True))
    #Find %LN line, i.e. what the creating
    # software thinks the current layer is (e.g. "BottomMask")
    layerNoteRegex = re.compile(r"^\%LN([^\*]+)\*%$")
    fileFunctionRegex = re.compile(r"G04 #@! TF\.FileFunction,([^\*]+)\*")
    layerDescription = None
    for line in lines:
        if layerNoteRegex.match(line):
            layerDescription = layerNoteRegex.match(line).group(1)
            break #Expecting only one layer note
        elif fileFunctionRegex.match(line):
            layerDescription = fileFunctionRegex.match(line).group(1)
            layerDescription = layerDescription.split(",")
    #Check if the layer note we found makes sense
    if layerDescription is None: #No %LN line found
        print(yellow("Couldn't find %%LN command or file function command in %s" % filename))
    else: #We found a layer description. Check for sanity
        if isinstance(layerDescription, list): # FileFunction command
            if layerDescription not in allowedLayerNotes[self.name]:
                print(red("Layer description '%s' in %s does not match any of the expected descriptions: %s" % (layerDescription, filename, allowedLayerNotes[self.name]), bold=True))

        else: # %LN command
            if layerDescription not in allowedLayerNotes[self.name]:
                print(red("Layer description '%s' in %s does not match any of the expected descriptions: %s" % (layerDescription, filename, allowedLayerNotes[self.name]), bold=True))
Example #10
def format_field_as_txt(field_name: str,
                        field_doc: FieldDoc,
                        second_column: int,
                        field_prefix: str = '') -> str:
    output = ''

    field_name_length = \
        INDENT + \
        len(field_prefix + field_name + FIELD_SUFFIX) + \
        INDENT

    field_name = \
        ' ' * INDENT + \
        ansicolor.cyan(field_prefix + field_name) + FIELD_SUFFIX + \
        ' ' * INDENT

    description_indent = ' ' * second_column

    description = field_doc['description']
    output += field_name + \
        textwrap.fill(
            description,
            width=78,
            initial_indent=description_indent,
            subsequent_indent=description_indent
        )[field_name_length:] + '\n'

    if 'examples' in field_doc:
        output += description_indent + \
            ansicolor.yellow('Examples:') + ' ' + \
            str(field_doc['examples']) + '\n'
    output += '\n\n'

    return output
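A hypothetical call to format_field_as_txt, assuming FieldDoc is a plain mapping with a 'description' and an optional 'examples' key, and that INDENT and FIELD_SUFFIX are small module-level constants (e.g. 2 and ':'):

field_doc = {
    'description': 'Maximum number of retry attempts before the download is abandoned.',
    'examples': [0, 3, 10],
}
print(format_field_as_txt('max_retries', field_doc, second_column=30))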
Example #11
def ms_connect(mshost, msport, login_packet):
    try:
        s = SAPNIStreamSocket.get_nisocket(mshost, msport)
    except socket.error:
        print "[!] Connection error to %s:%s" % (mshost, msport)
        exit(-1)
    r = s.sr(login_packet)
    print "[+] Connected to message server " + yellow(mshost + ":%s" % msport, bold=True)
    return s
Example #12
 def save(self):
     hostname = urlrewrite.get_hostname(self.wb.root.url)
     filename = urlrewrite.hostname_to_filename(hostname)
     ioutils.write_err("Saving session to %s ..." %
                     ansicolor.yellow(filename + ".{web,session}"))
     ioutils.serialize(self.wb, filename + ".web", dir=ioutils.LOGDIR)
     if self.queue:
         ioutils.serialize(self.queue, filename + ".session", dir=ioutils.LOGDIR)
     # only the web is being saved, i.e. spidering is complete; remove the old session
     elif ioutils.file_exists(filename + ".session", dir=ioutils.LOGDIR):
         ioutils.delete(filename + ".session", dir=ioutils.LOGDIR)
     ioutils.write_err(ansicolor.green("done\n"))
Example #13
def f_verbose(value):
    if ("[X]" in value) or ("[+]" in value):
        f_save(value + '\n')

    col_cred = value.split('`')
    neutrino = ''

    for index, item in enumerate(col_cred):
        if index & 1:
            neutrino = neutrino + blue(item)
        else:
            neutrino += item
    if "[X]" in neutrino:
        print neutrino.replace("[X]", red("[X]"))
    elif "[+]" in neutrino:
        print neutrino.replace("[+]", yellow("[+]"))
    elif args.verbose:
        print neutrino.replace("[*]",
                               green("[*]")).replace("[!]", magenta("[!]"))

    return
Example #14
def cli():
    search = subprocess.check_output(
        "ag -i -C 2 '// ?todo' | sed 's#\([^/]\)[^/: ]*/#\\1/#g'", shell=True)
    search_lines = search.split("\n")
    search_lines.pop()

    print("")

    if not len(search_lines):
        print(green("✓ No TODOs found!"))
        sys.exit(0)

    print(yellow("âš  Be aware of these TODOs:"))
    todo_line_pattern = re.compile(".*// ?todo.*", re.IGNORECASE)
    for line in search_lines:
        if re.match(todo_line_pattern, line):
            print(green(line))
        else:
            print(line)

    print("")
Example #15
 def __init__(self):
     """Setup a new connection"""
     print(yellow("Initializing new YakDB connection"))
     self.db = YakDBDocumentDatabase()
     # Initialize NLTK objects
     self.nerTokenizer = RegexpTokenizer(r'\s+', gaps=True)
Example #16
def complain(msg, minor=False):
    if minor:
        ansicolor.write_out(ansicolor.yellow('-> %s\n' % msg))
    else:
        ansicolor.write_out(ansicolor.yellow('> %s\n' % msg))
Example #17
 def assert_in_web(self, url):
     if url not in self.index:
         ioutils.write_err("Url %s not in the web\n" % ansicolor.yellow(url))
         sys.exit(1)
Example #18
def read_timeline(filename, verbose=False):
    """
    Reads a timeline of intervals from a text file. Returns the timeline, the step count, and whether any errors were found.
        :param filename: is the path to the timeline file.
        :param verbose: is an optional flag.
            When true, extra parsing information is printed to the console. Defaults to false.
    """
    # make timeline array
    timeline = []
    steps = 0

    count, total = 0, 0

    # Yes, this is the second part where this is checked. Gotta be sure.
    if not file_exists(filename):
        if verbose:
            print(add_quotes(filename), "is not a file")
        # return the same (timeline, steps, errors) shape as the normal path
        return [], DEFAULT_STEPS_IN_TIMELINE, True

    errors = False

    with open(filename) as lines:
        for num, line in enumerate(lines):
            line = line.strip()
            if verbose:
                print("reading line {} >".format(num + 1), add_quotes(line))

            if not line:
                if verbose:
                    print(black("Skipping blank line\n", bold=True))
                continue

            if line.startswith("#"):
                if verbose:
                    print(black("Skipping comment line\n", bold=True))
                continue

            # if you find the comment symbol, ignore everything after it
            comment_pos = line.find("#")
            if comment_pos != -1:
                if verbose:
                    print(black("\tRemoving comment > ", bold=True), end='')
                    print(add_quotes(line[comment_pos:]))
                    print(black("\tParsing remaining line > ", bold=True),
                          end='')
                    print(add_quotes(line[:comment_pos]))
                line = line[:comment_pos]

            if count == 0 and steps == 0:
                found_steps = False
                try:
                    nums_in_line = re.findall(r'\d+', line)
                    if len(nums_in_line) == 1:
                        steps = int(nums_in_line[0])
                        found_steps = True
                except (ValueError, IndexError):
                    pass
                if verbose:
                    if found_steps:
                        print(green("\tFound step count:", bold=True), steps)
                    else:
                        print(
                            yellow(
                                "\tFailed to find step count before first interval.",
                                bold=True))
                        print(
                            black("\t\tSetting step count to default:",
                                  bold=True), DEFAULT_STEPS_IN_TIMELINE)
                if found_steps:
                    continue

            intvs = line.split(",")
            timeline.append([])

            for intv in intvs:

                intv = intv.strip()

                # see if it's in the form [#] [#] [#]
                if not parse_check_format(intv, verbose):
                    errors = True
                    continue

                params = intv.split()

                # check that each number is legit
                if not parse_check_numbers(params, verbose):
                    errors = True
                    continue

                # use those valid numbers to make an interval
                new_interval = Interval(int(params[0]), int(params[1]),
                                        int(params[2]))

                if verbose:
                    print(green("\t\tinterval >"),
                          interval_to_string(new_interval))

                total = total + 1
                timeline[count].append(new_interval)

            # if we've run through the whole line without adding any intervals...
            if not timeline[count]:
                if verbose:
                    print(yellow("no intervals found. Skipping line."))
                del timeline[-1]
            else:
                if verbose:
                    print(green("intervals found:"), len(timeline[count]))
                count = count + 1

            if verbose:
                print()  # newline

    if verbose:
        print("reached end of file.")
        print("found {} intervals across {} channels.".format(
            total, len(timeline)))

    if steps == 0:
        steps = DEFAULT_STEPS_IN_TIMELINE

    return timeline, steps, errors
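A sketch of a timeline file that read_timeline should accept, based purely on the parsing logic above: an optional step-count line, then one channel per line with comma-separated intervals of three integers each, and '#' starting a comment (the meaning of the three numbers is whatever Interval() assigns them):

example = (
    "# two-channel timeline\n"
    "120                  # step count\n"
    "0 10 255, 20 30 128  # channel 0: two intervals\n"
    "5 15 64              # channel 1: one interval\n"
)
with open("timeline.txt", "w") as handle:
    handle.write(example)
timeline, steps, errors = read_timeline("timeline.txt", verbose=True)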
    # 11 - Loop here and answer RGWMON_SEND_NILIST + SELFIDENT packets
    while True:
        logger.info("Waiting for packets...")
        r = s.recv()

        # most common case: the packet was parsed properly
        if r.haslayer(SAPMSAdmRecord):
            rec = r.adm_records[0].record
            if not rec: continue
            opc = ms_adm_opcode_values[r.adm_records[0].opcode]

            # discard packets if key is NULL
            if r.key == null_key: continue
            foo, key_t, key_u, key_respid = struct.unpack('!BBHL', r.key)
            print "%s > %s: key '%s' = session T%d_U%d_M0 (RespId %d)" % (
                yellow(r[SAPMS].fromname.strip()),
                yellow(r[SAPMS].toname.strip()), r.key.encode('hex'), key_t,
                key_u, key_respid)

            if 'RGWMON_SEND_NILIST' in rec or 'RSMONGWY_SEND_NILIST' in rec:
                if args.debug: r.show()
                print "%s > %s: Ask for RGWMON_SEND_NILIST report" % (yellow(
                    r[SAPMS].fromname.strip()), yellow(
                        r[SAPMS].toname.strip()))
                # let's filter out packets from other AS
                if r[SAPMS].fromname.strip() != target:
                    print "Dropping packet as it's not from our target."
                    continue

                p = ms_adm_nilist(r, rec)
                if args.debug:
def ms_adm_nilist(p, whos_asking):
    print "[+] " + yellow(
        "Generating AD_GET_NILIST_PORT answer for request with key",
        bold=True) + " '%s'" % p.key.encode('hex')
    fromname = str()
    toname = str()
    answer = 1

    # extract info from key
    foo, key_t, key_u, key_respid = struct.unpack('!BBHL', p.key)

    fromname = my_name
    toname = p.fromname

    key = p.key
    flag = 'MS_REPLY'
    opcode_version = 5
    adm_type = 'ADM_REPLY'
    rec = ' ' * 100
    recno = 0
    records = None

    r = SAPMS(toname=toname,
              fromname=fromname,
              key=key,
              domain='ABAP',
              flag=flag,
              iflag='MS_SEND_NAME',
              opcode='MS_DP_ADM',
              opcode_version=p.opcode_version,
              opcode_charset=p.opcode_charset,
              dp_version=p.dp_version,
              adm_recno=recno,
              adm_type=adm_type,
              adm_records=records)

    ###############################
    # 745 KERNEL and sometime 742 #
    ###############################
    # why "sometime" for 742?
    # they have both programs, old "RSMONGWY_SEND_NILIST" and new "RGWMON_SEND_NILIST"
    # they both use dp_version=13, but IP list format expected in the ADM layer is a
    # bit different between both programs.
    if p.dp_version == 13:
        r.adm_recno = 4
        if 'RSMONGWY_SEND_NILIST' in whos_asking:
            r.adm_records = [
                SAPMSAdmRecord(opcode='AD_SELFIDENT',
                               record=rec,
                               serial_number=0,
                               executed=answer),
                SAPMSAdmRecord(opcode='AD_GET_NILIST',
                               record=ms_adm_build_old_ip_record("127.0.0.1"),
                               serial_number=0,
                               executed=answer),
                SAPMSAdmRecord(opcode='AD_GET_NILIST',
                               record=ms_adm_build_old_ip_record("127.0.0.2"),
                               serial_number=1,
                               executed=answer),
                SAPMSAdmRecord(opcode='AD_GET_NILIST',
                               record=ms_adm_build_old_ip_record(
                                   fake_as["ip"]),
                               serial_number=2,
                               executed=answer)
            ]
        else:
            r.adm_records = [
                SAPMSAdmRecord(opcode='AD_SELFIDENT',
                               record=rec,
                               serial_number=0,
                               executed=answer),
                SAPMSAdmRecord(opcode='AD_GET_NILIST_PORT',
                               record=ms_adm_build_ip_record("127.0.0.1"),
                               serial_number=0,
                               executed=answer),
                SAPMSAdmRecord(opcode='AD_GET_NILIST_PORT',
                               record=ms_adm_build_ip_record("127.0.0.2"),
                               serial_number=1,
                               executed=answer),
                SAPMSAdmRecord(opcode='AD_GET_NILIST_PORT',
                               record=ms_adm_build_ip_record(fake_as["ip"]),
                               serial_number=2,
                               executed=answer)
            ]
        r.dp_info1 = SAPDPInfo1(
            dp_req_len=452,
            dp_req_prio='MEDIUM',
            dp_type_from='BY_NAME',
            dp_fromname=my_name,
            dp_agent_type_from='DISP',
            dp_worker_from_num=p.dp_info1.dp_worker_to_num,
            dp_addr_from_t=p.dp_info1.dp_addr_from_t,
            dp_addr_from_u=p.dp_info1.dp_addr_from_u,
            dp_addr_from_m=0,
            dp_respid_from=p.dp_info1.dp_respid_from,
            dp_type_to='BY_NAME',
            dp_toname=p.fromname,
            dp_agent_type_to='WORKER',
            dp_worker_type_to='DIA',
            dp_worker_to_num=p.dp_info1.dp_worker_from_num,
            dp_addr_to_t=p.dp_info1.dp_addr_from_t,
            dp_addr_to_u=p.dp_info1.dp_addr_from_u,
            dp_addr_to_m=p.dp_info1.dp_addr_from_m,
            dp_respid_to=p.dp_info1.dp_respid_from,
            dp_req_handler='REQ_HANDLER_ADM_RESP',
            dp_blob_worker_from_num=p.dp_info1.dp_worker_from_num,
            dp_blob_addr_from_t=p.dp_info1.dp_addr_from_t,
            dp_blob_addr_from_u=p.dp_info1.dp_addr_from_u,
            dp_blob_respid_from=p.dp_info1.dp_blob_respid_from,
            dp_blob_dst=(' ' * 35).encode('UTF-16-BE'))

    ##############
    # 720 KERNEL #
    ##############
    # Here we use old IP list format
    # and a much simpler DP layer
    if p.dp_version == 11:
        r.adm_recno = 4
        r.adm_records = [
            SAPMSAdmRecord(opcode='AD_SELFIDENT',
                           record=rec,
                           serial_number=0,
                           executed=answer),
            SAPMSAdmRecord(opcode='AD_GET_NILIST',
                           record=ms_adm_build_old_ip_record("127.0.0.1"),
                           serial_number=0,
                           executed=answer),
            SAPMSAdmRecord(opcode='AD_GET_NILIST',
                           record=ms_adm_build_old_ip_record("127.0.0.2"),
                           serial_number=1,
                           executed=answer),
            SAPMSAdmRecord(opcode='AD_GET_NILIST',
                           record=ms_adm_build_old_ip_record(fake_as["ip"]),
                           serial_number=2,
                           executed=answer)
        ]

        r.dp_info2 = SAPDPInfo2(dp_req_prio='MEDIUM',
                                dp_blob_14=p.dp_info2.dp_blob_14,
                                dp_name_to=p.fromname,
                                dp_addr_from_t=255,
                                dp_blob_09='\xff\xcc',
                                dp_blob_10='\x01\x00',
                                dp_addr_from_u=0,
                                dp_addr_from_m=0,
                                dp_addr_to_t=key_t,
                                dp_addr_to_u=key_u,
                                dp_addr_to_m=0,
                                dp_respid_to=key_respid,
                                dp_blob_19=1,
                                dp_blob_21=105)
    ##############
    # 749 KERNEL #
    ##############
    # Used on the latest kernels, e.g. S4HANA servers
    if p.dp_version == 14:
        r.adm_recno = 4
        r.adm_records = [
            SAPMSAdmRecord(opcode='AD_SELFIDENT',
                           record=rec,
                           serial_number=0,
                           executed=answer),
            SAPMSAdmRecord(opcode='AD_GET_NILIST_PORT',
                           record=ms_adm_build_ip_record("127.0.0.1"),
                           serial_number=0,
                           executed=answer),
            SAPMSAdmRecord(opcode='AD_GET_NILIST_PORT',
                           record=ms_adm_build_ip_record("127.0.0.2"),
                           serial_number=1,
                           executed=answer),
            SAPMSAdmRecord(opcode='AD_GET_NILIST_PORT',
                           record=ms_adm_build_ip_record(fake_as["ip"]),
                           serial_number=2,
                           executed=answer)
        ]
        r.dp_info3 = SAPDPInfo3(dp_req_len=348,
                                dp_req_prio='MEDIUM',
                                dp_type_from='BY_NAME',
                                dp_fromname=my_name,
                                dp_agent_type_from='DISP',
                                dp_worker_from_num=p.dp_info3.dp_worker_to_num,
                                dp_addr_from_t=p.dp_info3.dp_addr_from_t,
                                dp_addr_from_u=p.dp_info3.dp_addr_from_u,
                                dp_addr_from_m=0,
                                dp_respid_from=p.dp_info3.dp_respid_from,
                                dp_type_to='BY_NAME',
                                dp_toname=p.fromname,
                                dp_agent_type_to='WORKER',
                                dp_worker_type_to='DIA',
                                dp_worker_to_num=p.dp_info3.dp_worker_from_num,
                                dp_addr_to_t=p.dp_info3.dp_addr_from_t,
                                dp_addr_to_u=p.dp_info3.dp_addr_from_u,
                                dp_respid_to=p.dp_info3.dp_respid_from,
                                dp_padd25=1,
                                dp_req_handler='REQ_HANDLER_ADM_RESP',
                                dp_padd29=p.dp_info3.dp_padd29,
                                dp_padd30=p.dp_info3.dp_padd30,
                                dp_padd31=p.dp_info3.dp_padd31,
                                dp_padd32=p.dp_info3.dp_padd32)
    open("/tmp/dp.bin", "wb").write(str(SAPNI() / r))
    return r
Example #21
def complain(msg, minor=False):
    if minor:
        ansicolor.write_out(ansicolor.yellow('-> %s\n' % msg))
    else:
        ansicolor.write_out(ansicolor.yellow('> %s\n' % msg))
Example #22
 def __init__(self):
     """Setup a new connection"""
     print(yellow("Initializing new YakDB connection"))
     self.db = YakDBDocumentDatabase()
     # Initialize NLTK objects
     self.nerTokenizer = RegexpTokenizer(r'\s+', gaps=True)
Example #23
    def parse_person_detail(self, response):
        """
        Parse a person's detail page
        """
        person = response.meta['person']
        full_name = PERSON.DETAIL.FULL_NAME.xt(response)

        ts = GENERIC.TIMESTAMP.xt(response)
        if not self.IGNORE_TIMESTAMP and not self.has_changes(
                person['parl_id'], person['source_link'], ts):
            self.logger.info(
                green(u"Skipping Person Detail, no changes: {}".format(
                    full_name)))
            return

        self.logger.info(u"Updating Person Detail {}".format(
            green(u"[{}]".format(person['reversed_name']))))

        bio_data = PERSON.DETAIL.BIO.xt(response)
        profile_photo_url = PERSON.DETAIL.PHOTO_URL.xt(response)
        profile_photo_copyright = PERSON.DETAIL.PHOTO_COPYRIGHT.xt(response)

        try:
            person_data = {
                'ts': ts,
                'photo_link': "{}{}".format(BASE_HOST, profile_photo_url),
                'photo_copyright': profile_photo_copyright,
                'full_name': full_name,
                'reversed_name': person['reversed_name'],
                'birthdate': bio_data['birthdate'],
                'birthplace': bio_data['birthplace'],
                'deathdate': bio_data['deathdate'],
                'deathplace': bio_data['deathplace'],
                'occupation': bio_data['occupation']
            }

            person_item, created_person = Person.objects.update_or_create(
                source_link=person['source_link'],
                parl_id=person['parl_id'],
                defaults=person_data)

            mandates_detail = PERSON.DETAIL.MANDATES.xt(response)
            for mandate in mandates_detail:
                if Party.objects.filter(short=mandate['party']):
                    party = Party.objects.filter(
                        short=mandate['party']).first()
                elif Party.objects.filter(titles__contains=[mandate['party']]):
                    party = Party.objects.filter(
                        titles__contains=[mandate['party']]).first()
                else:
                    self.logger.warning(
                        u"Can't find party {} for mandate".format(
                            yellow(u"[{}]".format(mandate['party']))))
                    continue
                mq = person_item.mandates.filter(party=party)

                # try to extract LLP from function string
                if "GP)" in mandate['function']:
                    try:
                        m_llp_roman = re.match('^.*\((.*)\. GP\).*$',
                                               mandate['function']).group(1)
                        m_llp = LegislativePeriod.objects.get(
                            roman_numeral=m_llp_roman)
                        mq = mq.filter(legislative_period=m_llp)
                    except:
                        self.logger.warning(
                            u"Can't find llp in function string {}".format(
                                yellow(u"[{}]".format(mandate['function']))))

                # try to find existing mandate to add in dates
                if mq.count() == 1:
                    md = mq.first()
                    md.start_date = mandate['start_date']
                    if mandate['end_date']:
                        md.end_date = mandate['end_date']
                    md.save()
                    self.logger.info(
                        u"Augmented mandate {} with start-/end-dates: {} - {} "
                        .format(green(u"{}".format(md)), md.start_date,
                                md.end_date))
            person_item.latest_mandate = person_item.get_latest_mandate()

            person_item.save()
            # Instantiate slug
            person_item.slug

        except Exception as error:
            self.logger.info(
                red(u"Error saving Person {}: \n\t{}".format(full_name,
                                                             error)))
            #import ipdb
            # ipdb.set_trace()
            return

        try:
            # Parse the Comittee (Ausschuss) memberships for this person
            memberships = COMITTEE.MEMBERSHIP.xt(response)

            for m in memberships:
                comittee = m['comittee']
                if comittee['nrbr'] == u'Nationalrat':
                    if comittee['legislative_period'] is not None:
                        llp = LegislativePeriod.objects.get(
                            roman_numeral=comittee['legislative_period'])
                        comittee['legislative_period'] = llp

                    comittee_item, created_comittee = Comittee.objects.update_or_create(
                        parl_id=comittee['parl_id'],
                        nrbr=comittee['nrbr'],
                        legislative_period=comittee['legislative_period'],
                        # source_link=comittee['source_link'],
                        active=comittee['active']
                        if 'active' in comittee else True,
                        defaults=comittee)
                    if created_comittee:
                        self.logger.info(u"Created comittee {}".format(
                            green(u"[{}]".format(comittee))))

                    function_data = {
                        'title': m['function'],
                        'short': m['function']
                    }

                    function_item, created_function = Function.objects.get_or_create(
                        **function_data)
                    if created_function:
                        self.logger.info(u"Created function {}".format(
                            green(u"[{}]".format(function_item))))

                    membership_data = {
                        'date_from': m['date_from'],
                        'comittee': comittee_item,
                        'person': person_item,
                        'function': function_item
                    }

                    membership_item, created_membership = ComitteeMembership.objects.update_or_create(
                        defaults={'date_to': m['date_to']}, **membership_data)

                    if created_membership:
                        self.logger.info(u"Created membership {}".format(
                            green(u"[{}]".format(membership_item))))

        except Exception as error:
            self.logger.info(
                red(u"Error adding Person's comittee memberships {}: \n\t{}".
                    format(full_name, error)))
            #import ipdb
            # ipdb.set_trace()
            return
Example #24
    def parse_person_detail(self, response):
        """
        Parse a person's detail page
        """
        person = response.meta['person']
        full_name = PERSON.DETAIL.FULL_NAME.xt(response)

        ts = GENERIC.TIMESTAMP.xt(response)
        if not self.IGNORE_TIMESTAMP and not self.has_changes(person['parl_id'], person['source_link'], ts):
            self.logger.info(
                green(u"Skipping Person Detail, no changes: {}".format(
                    full_name)))
            return

        self.logger.info(u"Updating Person Detail {}".format(
            green(u"[{}]".format(person['reversed_name']))
        ))

        bio_data = PERSON.DETAIL.BIO.xt(response)
        profile_photo_url = PERSON.DETAIL.PHOTO_URL.xt(response)
        profile_photo_copyright = PERSON.DETAIL.PHOTO_COPYRIGHT.xt(response)

        try:
            person_data = {
                'ts': ts,
                'photo_link': "{}{}".format(BASE_HOST, profile_photo_url),
                'photo_copyright': profile_photo_copyright,
                'full_name': full_name,
                'reversed_name': person['reversed_name'],
                'birthdate': bio_data['birthdate'],
                'birthplace': bio_data['birthplace'],
                'deathdate': bio_data['deathdate'],
                'deathplace': bio_data['deathplace'],
                'occupation': bio_data['occupation']}

            person_item, created_person = Person.objects.update_or_create(
                source_link=person['source_link'],
                parl_id=person['parl_id'],
                defaults=person_data
            )

            mandates_detail = PERSON.DETAIL.MANDATES.xt(response)
            for mandate in mandates_detail:
                if Party.objects.filter(short=mandate['party']):
                    party = Party.objects.filter(
                        short=mandate['party']).first()
                elif Party.objects.filter(titles__contains=[mandate['party']]):
                    party = Party.objects.filter(
                        titles__contains=[mandate['party']]).first()
                else:
                    self.logger.warning(u"Can't find party {} for mandate".format(
                        yellow(u"[{}]".format(mandate['party']))
                    ))
                    continue
                mq = person_item.mandates.filter(party=party)

                # try to extract LLP from function string
                if "GP)" in mandate['function']:
                    try:
                        m_llp_roman = re.match(
                            '^.*\((.*)\. GP\).*$', mandate['function']).group(1)
                        m_llp = LegislativePeriod.objects.get(
                            roman_numeral=m_llp_roman)
                        mq = mq.filter(legislative_period=m_llp)
                    except:
                        self.logger.warning(u"Can't find llp in function string {}".format(
                            yellow(u"[{}]".format(mandate['function']))
                        ))

                # try to find existing mandate to add in dates
                if mq.count() == 1:
                    md = mq.first()
                    md.start_date = mandate['start_date']
                    if mandate['end_date']:
                        md.end_date = mandate['end_date']
                    md.save()
                    self.logger.info(u"Augmented mandate {} with start-/end-dates ".format(
                        green(u"{}".format(md))
                    ))

            person_item.save()
            # Instantiate slug
            person_item.slug

        except Exception as error:
            self.logger.info(
                red(u"Error saving Person {}: \n\t{}".format(full_name, error)))
            #import ipdb
            # ipdb.set_trace()
            return

        try:
            # Parse the Comittee (Ausschuss) memberships for this person
            memberships = COMITTEE.MEMBERSHIP.xt(response)

            for m in memberships:
                comittee = m['comittee']
                if comittee['nrbr'] == u'Nationalrat':
                    if comittee['legislative_period'] is not None:
                        llp = LegislativePeriod.objects.get(
                            roman_numeral=comittee['legislative_period'])
                        comittee['legislative_period'] = llp

                    comittee_item, created_comittee = Comittee.objects.update_or_create(
                        parl_id=comittee['parl_id'],
                        nrbr=comittee['nrbr'],
                        legislative_period=comittee['legislative_period'],
                        # source_link=comittee['source_link'],
                        active=comittee[
                            'active'] if 'active' in comittee else True,
                        defaults=comittee)
                    if created_comittee:
                        self.logger.info(u"Created comittee {}".format(
                            green(u"[{}]".format(comittee))
                        ))

                    function_data = {
                        'title': m['function'],
                        'short': m['function']
                    }

                    function_item, created_function = Function.objects.get_or_create(
                        **function_data)
                    if created_function:
                        self.logger.info(u"Created function {}".format(
                            green(u"[{}]".format(function_item))
                        ))

                    membership_data = {
                        'date_from': m['date_from'],
                        'comittee': comittee_item,
                        'person': person_item,
                        'function': function_item
                    }

                    membership_item, created_membership = ComitteeMembership.objects.update_or_create(
                        defaults={
                            'date_to': m['date_to']
                        },
                        **membership_data
                    )

                    if created_membership:
                        self.logger.info(u"Created membership {}".format(
                            green(u"[{}]".format(membership_item))
                        ))

        except Exception as error:
            self.logger.info(
                red(u"Error adding Person's comittee memberships {}: \n\t{}".format(full_name, error)))
            #import ipdb
            # ipdb.set_trace()
            return
Example #25
    def parse_person_detail(self, response):
        """
        Parse a person's detail page
        """
        person = response.meta.get('person',None)
        if not person:
            person = Person.objects.get(parl_id=[x for x in response.url.split('/') if 'PAD' in x][0]).__dict__
        full_name = PERSON.DETAIL.FULL_NAME.xt(response)

        ts = GENERIC.TIMESTAMP.xt(response)
        if not self.IGNORE_TIMESTAMP and not self.has_changes(person['parl_id'], person['source_link'], ts):
            logger.debug(
                green(u"Skipping Person Detail, no changes: {}".format(
                    full_name)))
            return

        logger.debug(u"Updating Person Detail {}".format(
            green(u"[{}]".format(person['reversed_name']))
        ))

        bio_data = PERSON.DETAIL.BIO.xt(response)
        profile_photo_url = PERSON.DETAIL.PHOTO_URL.xt(response)
        profile_photo_copyright = PERSON.DETAIL.PHOTO_COPYRIGHT.xt(response)

        try:
            person_data = {
                'ts': ts,
                'photo_link': "{}{}".format(BASE_HOST, profile_photo_url),
                'photo_copyright': profile_photo_copyright,
                'full_name': full_name,
                'reversed_name': person['reversed_name'],
                'birthdate': bio_data['birthdate'],
                'birthplace': bio_data['birthplace'],
                'deathdate': bio_data['deathdate'],
                'deathplace': bio_data['deathplace'],
                'occupation': bio_data['occupation']}

            person_item, created_person = Person.objects.update_or_create(
                source_link=person['source_link'],
                parl_id=person['parl_id'],
                defaults=person_data
            )

            mandates_detail = PERSON.DETAIL.MANDATES.xt(response)
            for mandate in mandates_detail:
                party = None
                if mandate['party']:
                    if Party.objects.filter(short=mandate['party']):
                        party = Party.objects.filter(
                            short=mandate['party']).first()
                    elif Party.objects.filter(titles__contains=[mandate['party']]):
                        party = Party.objects.filter(
                            titles__contains=[mandate['party']]).first()
                    else:
                        logger.warning(u"{}: Can't find party {} for mandate".format(
                            person_data['full_name'], yellow(u"[{}]".format(mandate['party']))
                        ))
                        continue

                mandate['party'] = party
                mandate['legislative_period'] = LegislativePeriod.objects.get(
                    roman_numeral=mandate['llp_roman']) if mandate['llp_roman'] else None
                del mandate['llp']
                del mandate['llp_roman']
                mandate['function'],_ = Function.objects.update_or_create(title=mandate['function'].strip())

                def uocparse(mandat, defaults=None):
                    r = {'defaults': {} if not defaults else defaults}
                    for k in mandat.keys():
                        dict_to_append = r if k in (
                            'person','function','party','legislative_period',
                            ) else r['defaults']
                        if dict_to_append is r and mandat[k] is None:
                            dict_to_append[k + '__isnull'] = True
                        else:
                            dict_to_append[k] = mandat[k]
                    return r

                ms = Mandate.objects.all()
                mandate['person'] = person_item
                nrbr = False
                ftmp = None
                nr = ('Abgeordnet' in mandate['function'].title and 'Nationalrat' in mandate['function'].title)
                br = ('Mitglied' in mandate['function'].title and 'Bundesrat' in mandate['function'].title)
                if nr or br:
                    nrbr = True
                    ms = person_item.mandate_set.filter(
                        (Q(function__title__contains='Abgeordnet') & Q(function__title__contains='Nationalrat'))
                        if nr else
                        (Q(function__title__contains='Bundesrat') & Q(function__title__contains='Mitglied')))
                    if ms and ms[0].function:
                        ftmp = ms[0].function
                        mandate['function'] = ftmp
                    if br:
                        del mandate['legislative_period']
                    #del mandate['function']


                p = uocparse(mandate, {} if not nrbr or not ftmp else {'function': ftmp})
                if br and ms:
                    p2 = dict(p)
                    del p2['defaults']
                    ms = ms.filter(**p2)
                    if ms:
                        ms = ms.filter(pk=ms[0].pk)
                try:
                    mq = ms.update_or_create(**p)
                except Exception as e:
                    logger.exception(red(u'Error Mandate update_or_create\n{}'.format(e)))

            person_item.latest_mandate = person_item.get_latest_mandate()

            person_item.save()
            # Instantiate slug
            person_item.slug
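The nested uocparse helper above turns a mandate dict into update_or_create() arguments: relation keys (person, function, party, legislative_period) become lookup kwargs, a None relation becomes a <field>__isnull=True lookup, and everything else lands in defaults. A toy illustration with made-up values:

mandat = {'person': person_item, 'party': None, 'start_date': '2013-10-29'}
# uocparse(mandat) yields roughly:
# {'defaults': {'start_date': '2013-10-29'},
#  'person': person_item,
#  'party__isnull': True}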
Example #26
def checkBoardOutline(self, filepath):
    filename = os.path.basename(filepath)
    #Basic gerber checks
    checkGerberFile(self, filepath)
    #Compute board outline
    millLines = readFileLines(filepath)
    # Find factors to get absolute coordinates:
    x_factor, y_factor = findCoordinateFormat(millLines)
    # Initialize X & Y
    x, y = 0, 0
    #We can only interpret the file if coordinates are absolute
    if not "G90*" in millLines:
        print(yellow("Mill coordinates in %s don't seem to be absolute (G90 missing!)" % filename))
        return
    #Determine coordinate units
    unit = parseGerberUnit(millLines)
    if unit is None: #Neither inch nor mm found
        print(yellow("Could not find coordinate units (mm/in) in %s" % filename))
        return
    #Parse the aperture list
    apertures = parseGerberApertures(millLines)
    selectApertureRegex = re.compile(r"(D\d+)\*")
    move2DRegex = re.compile(r"X(-?\d+)Y(-?\d+)D(\d+)\*") #Move (D2) or draw (D1)
    move1DRegex = re.compile(r"([XY])(-?\d+)D(\d+)\*") #With only one coordinate
    #Try to interpret gerber file
    minCoords = (sys.maxsize, sys.maxsize)
    maxCoords = (0, 0)
    lastCoords = (0, 0)
    currentAperture = None
    apertureUseCount = Counter()
    for line in millLines:
        if selectApertureRegex.match(line):
            apertureCode = selectApertureRegex.match(line).group(1)
            currentAperture = findAperture(apertures, apertureCode)
        elif move2DRegex.match(line):
            match = move2DRegex.match(line)
            x = int(match.group(1)) / x_factor
            y = int(match.group(2)) / y_factor
            apertureUseCount[currentAperture] += 1
        elif move1DRegex.match(line):
            match = move1DRegex.match(line)
            apertureUseCount[currentAperture] += 1
            if match.group(1) == "X":
                x = int(match.group(2)) / x_factor
                y = lastCoords[1]
            elif match.group(1) == "Y":
                x = lastCoords[0]
                y = int(match.group(2)) / y_factor
            else: raise Exception("Internal error: Invalid coordinate type in 1D move: %s" % match.group(1))
        else: continue
        #Compute min/max coordinates
        lastCoords = (x, y)
        minCoords = (min(minCoords[0], lastCoords[0]), min(minCoords[1], lastCoords[1]))
        maxCoords = (max(maxCoords[0], lastCoords[0]), max(maxCoords[1], lastCoords[1]))
    #Compute board size (minimum enclosing rectangle)
    boardSize = (maxCoords[0] - minCoords[0], maxCoords[1] - minCoords[1])
    # Compute size of most common aperture
    mostCommonAperture = apertureUseCount.most_common(1)[0][0]
    #Print info
    print(black("\tGerber offset: ({1:.2f} {0}, {2:.2f} {0})".format(unit, minCoords[0], minCoords[1])))
    print(black("\tBoard size (minimum rectangle): %.1f %s x %.1f %s" % \
            (boardSize[0], unit, boardSize[1], unit)))