Example #1
import fileinput as fi

def write_file():
    # temp and new are assumed to be defined elsewhere: temp holds the lines
    # to match (indexed by line number) and new holds the replacement text.
    for line in fi.FileInput('a.md', inplace=1):
        for i in temp:
            if temp[i] == line:
                line = str(new[i - 38]) + "\n"
        print(line, end='')
Example #2
import fileinput as fi

def write_file(file, current_list, new_list):
    # Rewrite the file in place, replacing each line found in current_list
    # with the entry at the same position in new_list.
    for line in fi.FileInput(file, inplace=1):
        for i, old in enumerate(current_list):
            if old == line:
                line = new_list[i] + "\n"
        print(line, end='')
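A minimal usage sketch for the helper above; the file name and the two lists are illustrative assumptions, and each entry in current_list must match a complete line of the file including its trailing newline.
current = ["old heading\n", "old footer\n"]   # lines expected to appear in notes.md
updated = ["new heading", "new footer"]       # replacements; write_file re-adds the newline
write_file('notes.md', current, updated)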
Example #3
 for symbol in all_new:
     add_list.extend(symbol)
 print(len(add_list))
 
 # if add list == 0 and active = 1:
 if len(add_list) == 0:
     pass
 # Format for twitter stream track parameter as an array.
 coin_list = " ','$".join(add_list)
 filter_list = "'$" + coin_list + " '"
 print(filter_list)
 
 # Stop, update and restart the Twitter stream
 command="supervisorctl stop twitter.db.altcoin1.py"
 p = os.system('sudo %s' % (command))
 
 command="cp /home/josh/tw/twitter.db.altcoin1.pristine.py /home/josh/tw/twitter.db.altcoin1.py"
 p = os.system(command)
 
 with fileinput.FileInput('/home/josh/tw/twitter.db.altcoin1.py', inplace=True) as file:
     for line in file:
         print(line.replace('listofsymbolshere', filter_list), end='')
 
 command="supervisorctl start twitter.db.altcoin1.py"
 p = os.system('sudo %s' % (command))    
 
 command="chown josh /home/josh/tw/* -Rf"
 p = os.system('sudo %s' % (command)) 
 
 command="chgrp josh /home/josh/tw/* -Rf"
 p = os.system('sudo %s' % (command))    
Example #4
                    sys.stderr.write('no entry for {' + tag + '}' + hom + '.\n')

# Section names of the individual XML fragments that make up the database
memparts = ['header', 'b', 'ch', 'D', 'gh', 'H', 'j', 'l', 'm', 'n', 'ng', 'p',
            'q', 'Q', 'r', 'S' ,'t', 'tlh', 'v', 'w', 'y', 'a', 'e', 'I', 'o',
            'u', 'suffixes', 'extra', 'examples', 'footer']
filenames = []
concat=''
sdir = os.path.dirname(os.path.realpath(sys.argv[0]))

for i, part in enumerate(memparts):
    filenames.append(os.path.join(sdir,'mem-{0:02d}-{1}.xml'.format(i, part)))

# Concatenate the individual files into a single database string
mem = fileinput.FileInput(files=filenames)
for line in mem:
    concat += line
mem.close()

# Read the database version from the version file
ver = fileinput.FileInput(files=(os.path.join(sdir,'VERSION')))
version = next(iter(ver)).strip()
ver.close()

# Parse the database XML tree and store the parsed entries in a dict
xmltree = ET.fromstring(concat)
qawHaq = OrderedDict()
for child in xmltree[0]:
    node = EntryNode(child)
Example #5
def replace_line_in_file(afile, match_string, replace_with):
    # replace_with should include a trailing newline, since it replaces the whole line.
    for line in fileinput.FileInput(afile, inplace=1):
        if match_string in line:
            line = replace_with
        print(line, end='')
Example #6
def modify_sql_file(file, hostport):
    if os.path.isfile(file):
        for line in fileinput.FileInput(file, inplace=1):
            if line.find("gpfdist") >= 0:
                line = re.sub(r'(\d+)\.(\d+)\.(\d+)\.(\d+):(\d+)', hostport, line)
            print(re.sub('\n', '', line))
Example #7
    m = re.match(r'^values-([a-z]{2})(-r([A-Z]{2}))?$', dir)

    if m is not None:
        # Language iso code
        isocode = m.group(1)
        if m.group(3) is not None:
            # Region code
            isocode += "_" +  m.group(3)

        langs.append(isocode)

#for isocode in sorted(langs):
#    print(isocode)

replacing = False
for line in fileinput.FileInput(arrayfile, inplace=1):
    if not replacing:
        print(line, end='')
    
    if 'name="translated_langs"' in line:
        replacing = True
        #Print list of languages
        #Locale default is empty string
        #print('{}<item></item>'.format(' '*8))
        for isocode in sorted(langs):
            print('{}<item>{}</item>'.format(' '*8, isocode))

    if replacing and '</string-array>' in line:
        replacing = False
        print(line, end='')
Example #8
        thisList = readFiles[NFilesDone : NFilesDone+NSections]
        print("NFilesDone", NFilesDone, "len(thisList)", len(thisList))

        ##you may have to give full path i.e. CurrentDIR/condor_submit/runlist_...
        inputRunListName = "/afs/cern.ch/work/i/idutta/private/HCAL_muALCA/CMSSW_10_1_6/src/Muon_Calibration_noTrack_2018/muon_calibration_ana/bash/condor/condor_submit/runList_"+data[0]+"_"+str(jobidx)+".txt"
        inputRunList = open(inputRunListName, "w")
        for line in thisList:
            inputRunList.write(line)

        condorSubmit = "condor_submit/submitCondor_"+data[0]+"_"+str(jobidx)
        jobName      = "20Oct2018"+data[0]+"_job"+str(jobidx)
        #outHistFile = data[0]+"_job"+str(jobidx)+".root"
        #isData       ="T"
        #isData       ="F"
        shutil.copyfile("proto_condor_submit",condorSubmit)
        for line in fileinput.FileInput(condorSubmit, inplace=1):
            line=line.replace("JOBNAME", jobName)
            line=line.replace("FILELIST",inputRunListName)
            #line=line.replace("ROOTOUT",outHistFile)
            #line=line.replace("DATANAME",dataname)
            #line=line.replace("ISDATA",isData)
            line=line.replace("OUTDIR",outDir)
            print(line.rstrip())
        
        submitCommand = "condor_submit "+condorSubmit
        print(submitCommand)
        os.system(submitCommand)     
        jobidx = jobidx+1
        NFilesDone = NFilesDone + len(thisList)

    print "Final NFilesDone ", NFilesDone
Example #9
def createCcvmXml(args):
    try:
        # Copy the template file to the /usr/share/cockpit/ablestack/tools/vmconfig/ccvm path
        for host_name in args.host_names:

            slot_hex_num = generateDecToHex()
            br_num = 0

            os.system("yes|cp -f " + pluginpath +
                      "/tools/xml-template/ccvm-xml-template.xml " +
                      pluginpath + "/tools/vmconfig/ccvm/ccvm-temp.xml")

            template_file = pluginpath + '/tools/vmconfig/ccvm/ccvm-temp.xml'

            with fileinput.FileInput(template_file,
                                     inplace=True,
                                     backup='.bak') as fi:

                for line in fi:

                    if '<!--memory-->' in line:
                        line = line.replace('<!--memory-->', str(args.memory))
                    elif '<!--cpu-->' in line:
                        line = line.replace('<!--cpu-->', str(args.cpu))
                    elif '<!--management_network_bridge-->' in line:
                        mnb_txt = "    <interface type='bridge'>\n"
                        mnb_txt += "      <mac address='" + generateMacAddress(
                        ) + "'/>\n"
                        mnb_txt += "      <source bridge='" + args.management_network_bridge + "'/>\n"
                        mnb_txt += "      <target dev='vnet" + str(
                            br_num) + "'/>\n"
                        mnb_txt += "      <model type='virtio'/>\n"
                        mnb_txt += "      <alias name='net" + str(
                            br_num) + "'/>\n"
                        mnb_txt += "      <address type='pci' domain='0x0000' bus='0x00' slot='" + slot_hex_num.pop(
                            0) + "' function='0x0'/>\n"
                        mnb_txt += "    </interface>"

                        br_num += 1
                        line = line.replace('<!--management_network_bridge-->',
                                            mnb_txt)
                    elif '<!--service_network_bridge-->' in line:
                        if args.service_network_bridge is not None:
                            snb_txt = "    <interface type='bridge'>\n"
                            snb_txt += "      <mac address='" + generateMacAddress(
                            ) + "'/>\n"
                            snb_txt += "      <source bridge='" + args.service_network_bridge + "'/>\n"
                            snb_txt += "      <target dev='vnet" + str(
                                br_num) + "'/>\n"
                            snb_txt += "      <model type='virtio'/>\n"
                            snb_txt += "      <alias name='net" + str(
                                br_num) + "'/>\n"
                            snb_txt += "      <address type='pci' domain='0x0000' bus='0x00' slot='" + slot_hex_num.pop(
                                0) + "' function='0x0'/>\n"
                            snb_txt += "    </interface>"

                            br_num += 1
                            line = line.replace(
                                '<!--service_network_bridge-->', snb_txt)
                        else:
                            # Remove the <!--service_network_bridge--> comment
                            line = ''

                    # Write out the (possibly modified) line
                    sys.stdout.write(line)

            os.system("scp " + pluginpath +
                      "/tools/vmconfig/ccvm/ccvm-temp.xml root@" + host_name +
                      ":" + pluginpath + "/tools/vmconfig/ccvm/ccvm.xml")

        # Delete the working files
        os.system("rm -f " + pluginpath +
                  "/tools/vmconfig/ccvm/ccvm-temp.xml " + pluginpath +
                  "/tools/vmconfig/ccvm/ccvm.xml.bak " + pluginpath +
                  "/tools/vmconfig/ccvm/ccvm-temp.xml.bak")

        # Return the result
        return createReturn(code=200, val={})

    except Exception as e:
        # Return the result
        print(e)
        return createReturn(code=500, val={})
Example #10
def render(lang,
           truelang,
           top1,
           top2,
           center,
           bottom1,
           bottom2,
           template_variant=None):
    x = unicode(center).encode('ascii', 'xmlcharrefreplace')
    y = unicode(top1).encode('ascii', 'xmlcharrefreplace')
    yy = unicode(top2).encode('ascii', 'xmlcharrefreplace')
    z = unicode(bottom1).encode('ascii', 'xmlcharrefreplace')
    zz = unicode(bottom2).encode('ascii', 'xmlcharrefreplace')
    ly = reduce(lambda x, y: sjoin(x, u" ", y), [y, yy])
    lz = reduce(lambda x, y: sjoin(x, u" ", y), [z, zz])

    font_repl = None
    if truelang in font_override:
        font_repl = font_override[truelang]
    else:
        font_repl = default_font
        pass

    for size in sizes:
        if not (size[2] in options.sizes):
            continue

        if y != None and len(y) > 0:
            t = "-top"
        else:
            t = None
            pass

        for var in varlist:
            if template_variant == None:
                if t != None:
                    template = "%s-%dx%d%s%s.svg" % (PREFIX, size[0], size[1],
                                                     var, t)
                if t == None or not os.path.exists(template):
                    template = "%s-%dx%d%s.svg" % (PREFIX, size[0], size[1],
                                                   var)
            else:
                if t != None:
                    template = "%s-%dx%d%s%s-%s.svg" % (
                        PREFIX, size[0], size[1], var, t, template_variant)
                if t == None or not os.path.exists(template):
                    template = "%s-%dx%d%s-%s.svg" % (PREFIX, size[0], size[1],
                                                      var, template_variant)
                pass

            if not os.path.exists(template):
                if options.verbose:
                    print "skipping %s / %s / %s: template \"%s\" does not exist" % (
                        lang, var, size[2], template)
                    pass
                if var:
                    print >> sys.stderr, "Needed template \"%s\" is missing. Aborting" % (
                        template)
                    sys.exit(1)
                continue

            outfile = "%s/%s%s.%s.png" % (outdir, size[2], var, lang)

            if options.verbose:
                print "%s / %s / %s: %s -> %s" % (lang, var, size[2], template,
                                                  outfile)
                pass

            workfile = os.path.join(workdir, "work.svg")
            out = open(workfile, "wb")
            for line in fileinput.FileInput(template, mode="rb"):
                line = unicode(line)
                line = line.replace(u"@@", x).replace(u"@TOPC@", y).replace(
                    u"@TOP@", yy).replace(u"@BOTTOM@",
                                          z).replace(u"@BOTTOMC@", zz)
                line = line.replace(u"@_TOP_@", ly).replace(u"@_BOTTOM_@", lz)
                line = line.replace(u"##.#", VERSION)

                if lang in extra:
                    for s, r in extra[lang].iteritems():
                        line = line.replace(
                            s,
                            unicode(r).encode('ascii', 'xmlcharrefreplace'))
                        pass
                    pass

                if font_repl != None:
                    line = line.replace(font_to_replace, unicode(font_repl))
                    pass
                out.write(line)
                pass
            out.close()

            rc = call_render(workfile, outfile, size[0], size[1])
            if options.keep:
                svg_outfile = "%s/%s%s.%s.%s.svg" % (outdir, PREFIX, var,
                                                     size[2], lang)
                shutil.copyfile(workfile, svg_outfile)
                if options.verbose:
                    print "SVG saved as %s" % svg_outfile
                    pass
                pass

            if rc != 0:
                print >> sys.stderr, "ERROR: call to inkscape failed for %s" % workfile
            pass
        pass
    pass
Example #11
outfile = open(outfile_name,"w")
for word in wordsDict:
    outfile.write(word+'\n')

#################################################################################
#removes numbers

table = str.maketrans(dict.fromkeys('0123456789'))

with open("temp2.txt", 'r') as f_in:
    data = f_in.read()
data = data.translate(table)
with open(outfile_name, 'w') as f_out:
    f_out.write(data)

# Drop empty lines while rewriting the file in place.
for line in fileinput.FileInput(outfile_name, inplace=1):
    if line.rstrip():
        print(line, end='')

if os.path.isfile("temp2.txt"):
    os.remove("temp2.txt")

#################################################################################

#removes duplicate words

infile = open("outfile.txt","r")
wordsDict = {}
for line in infile:
    addBoolean = True
    for word in wordsDict:
Example #12
async def login(reader, writer):
    """
    This function makes the user login.If the input user name or password
    is wrong or doesn't exists,
    it gives a notification that the username or password is wrong.
    If the user is already loggedin and tries to login again,
    it shows a warning that the access denied to the person who is trying to login again.
    If the user name and password are correct and the user is logging in without
    any active login record,the login will be made successful and displays the further action
    that the user want to perform.
    Now the user selects the required option.
    if the option is '1' or 'Create new folder' or 'create new folder',
    a folder with desired name is created.
    if the option is '2' or 'Create a new file' or 'create a new file',
    a file in the desired folder with desired name having data
    entered by client is created.
    if the option is 3 or 'Open the file' or 'open the file', the created file can be read.
    if the option is 4 or 'View list of folders' or 'view list of folders'
    or 'folders list', it displays the list of folders, the data it consumed,
    date of creation and date of modification.
    if the option is 5 or 'change_folder',user can change folder path.
    if the option is 6 or 'Rename folder',user can rename folder.
    if the option is '0' or 'Logout' or 'logout', the user will be logged out from the client space.
    All these functions of selected options will be imported from operations code
    """

    display_data = """\t\tPlease enter your login details
    USER ID   : """
    writer.write(display_data.encode())
    data = await reader.read(1000)
    user_id = data.decode().strip()
    display_data = """      PASSWORD : """
    writer.write(display_data.encode())
    data = await reader.read(1000)
    password_entered = data.decode().strip()
    filepath = "C:\\Users\\mscrs\\Desktop\\root\\admin\\Register.txt"
    init = 0
    i = True
    #If user already logged in, The login access should be denied
    with open(filepath, 'r') as regis:
        for line in regis:
            if user_id in line:
                if password_entered in line:
                    init = 1
                    if "Status : Logged in" in line:
                        display_data = "Access denied\nThe user is already logged in\nPlease restart server and client"
                        writer.write(display_data.encode())
                        i = False
                        break
    if i:
        for line in fileinput.FileInput(filepath, inplace=1):
            if user_id in line:
                if password_entered in line:
                    line = line.rstrip()
                    line = line.replace(line, line + "Status : Logged in\n")
            print(line, end='')
    #If username or password is wrong, a warning should be displayed
    if init == 0:
        display_data = "Invalid Username or Password\nPlease restart server and client"
        writer.write(display_data.encode())
    #If username and password correct, user will be logged in
    elif init == 1 and i:
        try:
            display_data = """--------------------Welcome :)--------------------\n
How can I help you
                1   -->  Create new folder
                2   -->  Create a new file
                3   -->  Open the file
                4   -->  View list of folders
                5   -->  Change folder or directory
                6   -->  Rename folder
                0   -->  Logout
                Please select the option :  """
            writer.write(display_data.encode())
            user_operations = Operations(user_id)
            while True:
                data = await reader.read(1000)
                command_given = data.decode().strip()
                if command_given in ('1', 'create new folder',
                                     'Create new folder', 'create_folder'):
                    display_data = "Name : "
                    writer.write(display_data.encode())
                    data = await reader.read(1000)
                    input_file_name = data.decode().strip()
                    filenamed = user_operations.folder_creation(
                        input_file_name)
                    if filenamed:
                        display_data = "Folder Created!"
                        writer.write(display_data.encode())
                    else:
                        display_data = "Folder Exists,please give another name"
                        writer.write(display_data.encode())
                elif command_given in ('2', 'create a new file',
                                       'Create a new file', 'write_file'):
                    display_data = "Enter folder name : "
                    writer.write(display_data.encode())
                    data = await reader.read(100)
                    folder_name = data.decode().strip()
                    display_data = "Enter file name : "
                    writer.write(display_data.encode())
                    data = await reader.read(100)
                    file_name = data.decode().strip()
                    display_data = "Enter the data : "
                    writer.write(display_data.encode())
                    data = await reader.read(100)
                    input_data = data.decode().strip()
                    i = user_operations.write_file(folder_name, file_name,
                                                   input_data)
                    if i:
                        display_data = "File Created"
                        writer.write(display_data.encode())
                    else:
                        display_data = "Folder doesn't exist"
                        writer.write(display_data.encode())
                elif command_given in ('3', 'open the file', 'Open the file',
                                       'read_file'):
                    display_data = "Enter folder name : "
                    writer.write(display_data.encode())
                    data = await reader.read(500)
                    folder_name = data.decode().strip()
                    display_data = "Name of the file : "
                    writer.write(display_data.encode())
                    data = await reader.read(500)
                    file_name = data.decode().strip()
                    fil = user_operations.read_the_file(folder_name, file_name)
                    if fil:
                        read_file = str(
                            user_operations.read_the_file(
                                folder_name, file_name))
                        writer.write(read_file.encode())
                    else:
                        display_data = "File doesn't exist"
                        writer.write(display_data.encode())
                elif command_given in ('4', 'View list of folders',
                                       'view list of folders', 'folders list',
                                       'list'):
                    name_of_file, size_of_file, created_time, modified_time = user_operations.list_of_directories(
                    )
                    display_data = "Name Size     Date of creation      Date of modification"
                    writer.write(display_data.encode())
                    data = await reader.read(100)
                    display_data = data.decode().strip()
                    show_folder_name = str(" ".join(map(str, name_of_file)))
                    writer.write(show_folder_name.encode())
                    data = await reader.read(100)
                    display_data = data.decode().strip()
                    show_folder_size = str(" ".join(map(str, size_of_file)))
                    writer.write(show_folder_size.encode())
                    data = await reader.read(100)
                    display_data = data.decode().strip()
                    show_created_time = str("  ".join(map(str, created_time)))
                    writer.write(show_created_time.encode())
                    data = await reader.read(100)
                    display_data = data.decode().strip()
                    show_modified_time = str("  ".join(map(str,
                                                           modified_time)))
                    writer.write(show_modified_time.encode())
                    data = await reader.read(100)
                    display_data = data.decode().strip()
                elif command_given in ('5', 'change_folder', 'Change folder',
                                       'change folder'):
                    display_data = "Enter Folder name : "
                    writer.write(display_data.encode())
                    data = await reader.read(10000)
                    old_name_of_folder = data.decode().strip()
                    new = user_operations.change_folder_path(
                        old_name_of_folder)
                    if new:
                        display_data = "Path changed successfully"
                    else:
                        display_data = "Path not changed"
                    writer.write(display_data.encode())
                elif command_given in ('6', 'Rename folder', 'rename folder'):
                    display_data = "Enter Folder name : "
                    writer.write(display_data.encode())
                    data = await reader.read(10000)
                    old_name_of_folder = data.decode().strip()
                    display_data = "Rename : "
                    writer.write(display_data.encode())
                    data = await reader.read(1000)
                    new_name = data.decode().strip()
                    new = user_operations.change_folder_name(
                        old_name_of_folder, new_name)
                    if new:
                        display_data = "Rename successful"
                    else:
                        display_data = "Folder name exists"
                    writer.write(display_data.encode())
                elif command_given in ('0', 'Logout', 'logout'):
                    break
        except Exception as excep:
            print(excep)
        finally:
            for line in fileinput.FileInput(filepath, inplace=1):
                if user_id in line:
                    if password_entered in line:
                        line = line.rstrip()
                        line = line.replace(
                            line, f"{user_id}:{password_entered},\n")
                print(line, end='')
            display_data = "Logged out"
            writer.write(display_data.encode())
    else:
        data = await reader.read(1000)
        display_data = "Invalid choice\nPlease restart the client\nIf not working kindly restart server"
        writer.write(display_data.encode())
Example #13
                (
                        '"' + '\r'
                ) + \
                (
                        'fi' + '\r'
                ) + \
                (
                        '}' + '\r'
                ) + \
                (
                    '#' + '\r'
                ) + \
                (
                    '#NEWFUNK'
                )
# The code below actually changes the main script ('.backup' is added to the original file)
with fileinput.FileInput('Mikrotik_backuper.sh', inplace=True, backup='.backup') as file:
    for line in file:
        line = line.rstrip()
        print(tamplate_variable if line == '#NEWVARIABLE' else line)
with fileinput.FileInput('Mikrotik_backuper.sh', inplace=True, backup='.backup2') as file:
    for line in file:
        line = line.rstrip()
        print(tamplate_func if line == '#NEWFUNK' else line)
os.unlink('Mikrotik_backuper.sh' + '.backup2')
with fileinput.FileInput('Mikrotik_backuper.sh', inplace=True, backup='.backup3') as file:
    for line in file:
        line = line.rstrip()
        print(tamplate_name if line == '# NEWNAME' else line)
os.unlink('Mikrotik_backuper.sh' + '.backup3')
Example #14
import fileinput

with fileinput.FileInput("webcam_list.txt", inplace=True,
                         backup='.bak') as file:
    for line in file:
        print(line.replace("shuyang", "rahul/dataset"), end='')
Example #15
def _replace_in_file(filename, old, new):
    """ Replaces old with new in file filename. """
    for line in fileinput.FileInput(filename, inplace=1):
        line = line.replace(old, new)
        print(line, end='')
    fileinput.close()

Example #16
def process(n):
    global INH
    global SIDES
    faces = 0
    n = int(n)
    for i in range(0, n):
        faces += SIDES[INH.readline().rstrip()]
    _print(faces)


def _print(faces):
    print(faces)


##################
#   Main parts   #
##################
INPUT = None
if len(sys.argv) > 2:
    INPUT = sys.argv[1]

INH = fileinput.FileInput(files=INPUT)
l1 = INH.readline()
while l1:
    process(l1)
    l1 = INH.readline()

INH.close()
Example #17
def build(name, options):

    dir_list = open(projects_path, 'r').readlines()
    dir_list.append('lime')
    dir_list.append('box2d/src')
    dir_list.append('closure')

    #dir_list = filter(lambda x: os.path.isdir(os.path.join(basedir,x)) and ['.git','bin','docs'].count(x)==0 ,os.listdir(basedir))

    opt = ' '.join(
        map(lambda x: '--root="' + os.path.join(basedir, x.rstrip()) + '/"',
            dir_list))

    call = 'python ' + escapeSpace(os.path.join(closure_dir,'closure/bin/build/closurebuilder.py'))+' '+opt+' --namespace="'+name+'" '+\
        '-o compiled -c '+compiler_path

    if options.advanced:
        call += " -f --compilation_level=ADVANCED_OPTIMIZATIONS"

    if options.debug:
        call += " -f --debug -f --formatting=PRETTY_PRINT"

    if options.externs_file:
        for i, opt in enumerate(options.externs_file):
            call += " -f --externs=" + opt

    outname = options.output
    if options.output[-3:] != '.js':
        outname += '.js'

    if options.map_file:
        call += " -f --formatting=PRETTY_PRINT -f --source_map_format=V3 -f --create_source_map=" + outname + '.map'
    else:
        call += " -f --define='goog.DEBUG=false'"

    if options.use_strict:
        call += " -f --language_in=ECMASCRIPT5_STRICT"

    if options.define:
        for i, opt in enumerate(options.define):
            call += " -f --define='" + opt + "'"

    if options.output:
        call += ' --output_file="' + outname + '"'
        if not exists(os.path.dirname(outname)):
            os.makedirs(os.path.dirname(outname))

    errhandle = 0
    try:
        subprocess.check_call(call, shell=True)
    except subprocess.CalledProcessError:
        # handle error later
        errhandle = 1
        pass

    if options.map_file:
        map_filename = outname + '.map'
        map_file = open(map_filename, 'r+')

        # make source paths relative in map file
        data = json.load(map_file)
        # json.dump cannot serialize a map object, so build a real list
        data['sources'] = [
            os.path.relpath(p, os.path.dirname(map_filename))
            for p in data['sources']
        ]
        map_file.close()
        map_file = open(map_filename, 'w')
        json.dump(data, map_file)
        map_file.close()

        # add path to map file
        out_file = open(outname, 'a')
        out_file.write('\n//@ sourceMappingURL=' +
                       os.path.relpath(map_filename, os.path.dirname(outname)))
        out_file.close()

    if options.output and options.preload:
        name = os.path.basename(outname)[:-3]
        target = os.path.dirname(outname)
        source = os.path.join(basedir, 'lime/templates/preloader')

        for root, dirs, files in os.walk(source):

            for fname in files:
                from_ = join(root, fname)
                to_ = from_.replace(source, target, 1)
                to_directory = split(to_)[0]
                to_ = to_.replace('__name__', name)
                if not exists(to_directory):
                    os.makedirs(to_directory)
                if not exists(to_):
                    copyfile(from_, to_)

        for root, dirs, files in os.walk(target):

            for fname in files:
                if exists(os.path.join(target, fname)):
                    for line in fileinput.FileInput(os.path.join(
                            target, fname),
                                                    inplace=1):
                        line = line.replace('{name}', name)
                        line = line.replace('{callback}', options.preload)

                        if fname == name + '.manifest':
                            line = re.sub(
                                r'# Updated on:.*', '# Updated on: ' +
                                datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                                line)
                        print(line.rstrip())

    if errhandle == 1:
        exit(1)
Example #18
def replace_text(filename: str, text_to_search: str, replacement_text: str):
    with fileinput.FileInput(filename, inplace=True) as file:
        for line in file:
            print(line.replace(text_to_search, replacement_text), end='')
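A short usage sketch for the function above; the file name and the search/replacement strings are illustrative assumptions. Passing backup='.bak' to fileinput.FileInput would additionally keep a copy of the original file.
# Rewrite config.ini in place, swapping a placeholder for a concrete value.
replace_text('config.ini', 'PLACEHOLDER_HOST', 'db.example.com')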
Example #19
def domainsorted_async(request_user):
    """ exports markdown report for all systems sorted by domain """

    # call directory cleaning function
    clean_directory.clean_directory(request_user)

    # get all domains
    domains = Domain.objects.all()

    # (re)create markdown directory for existing domains
    if len(domains) > 0:
        for domain in domains:
            os.mkdir(markdown_path + "/docs/systems/" + domain.domain_name)

    # create directory for systems without domains
    os.mkdir(markdown_path + "/docs/systems/other_domains/")

    # get all systems
    systems = System.objects.all().order_by('domain', 'system_name')

    # create empty list and dict (needed for mkdocs.yml)
    systemlist = []
    systemdict = {}
    domaindict = {}
    domainlist = []

    # iterate over systems
    for system in systems:

        # skip system depending on export variable
        if system.system_export_markdown == False:
            continue

        # call writing function (and get return values)
        rid, rfqdn, rpath, rdomain = write_report_domainsorted(
            system, request_user)
        """ build a dict that is used for the system section in mkdocs.yml """

        # build string as key for systemdict (needed for mkdocs.yml)
        index = rfqdn + " (" + rid + ")"
        # add value to key in 1-value dict (needed for mkdocs.yml)
        systemdict[index] = rpath
        # check whether domain was already used as second level entry (headline)
        if rdomain in domainlist:
            # add placeholder instead of (r)domain (because it was used before)
            domaindict["already_used_domain"] = systemdict
        else:
            # add dict to another 1-value dict (needed for mkdocs.yml)
            if rdomain == "other_domains":
                # add "Other domains"
                domaindict["Other domains"] = systemdict
            else:
                # add real domains
                domaindict[rdomain] = systemdict
            # add (r)domain to domainlist so it will not be written to systemlist the next iteration
            domainlist.append(rdomain)
        # add dict to list (needed for mkdocs.yml)
        systemlist.append(domaindict)
        # set dicts to empty dicts (for next iteration)
        systemdict = {}
        domaindict = {}

    # get path for mkdocs.yml
    mkdconfpath = markdown_path + "/mkdocs.yml"

    # open mkdocs.yml for reading
    mkdconffile = open(mkdconfpath, "r")

    # read YAML to dict
    mkdconfdict = yaml.safe_load(mkdconffile)

    # close mkdocs.yml
    mkdconffile.close()

    # get pages list
    mkdconflist = mkdconfdict['pages']

    # set counter
    i = 0
    j = 0

    # iterate over 'pages' list
    for item in mkdconflist:

        # find subsection 'Systems' in list
        try:
            dummy = item['Systems']
            # set index
            j = i
        except KeyError:
            # this entry is not the 'Systems' section, do nothing
            pass

        # autoincrement counter for next loop
        i += 1

    # set at dict 'Systems' in list mkdconflist at index j (replace section 'Systems')
    mkdconflist[j]['Systems'] = systemlist

    # set pages with old entries and new 'Systems' section
    mkdconfdict['pages'] = mkdconflist

    # open mkdocs.yml for writing (new file)
    mkdconffile = open(mkdconfpath, "w")
    # write mkdocs.yml
    yaml.dump(mkdconfdict,
              mkdconffile,
              default_flow_style=False,
              default_style='"')
    # close mkdocs.yml
    mkdconffile.close()
    """ adds hyphens for third level entries (are generated without but mkdocs needs them) """

    # open mkdocs.yml again for inplace replacement
    mkdconffile = fileinput.FileInput(mkdconfpath, inplace=True)
    # iterate over lines in mkdocs.yml
    for line in mkdconffile:
        # add hyphen for entries in third level (needed by mkdocs)
        line = re.sub(r"^      ", "    - ", line.rstrip())
        print(line)
    """ remove placeholder 'already_used_domain' that was created before for second level entries """

    # open mkdocs.yml again for inplace replacement
    mkdconffile = fileinput.FileInput(mkdconfpath, inplace=True)
    # iterate over lines in mkdocs.yml
    for line in mkdconffile:
        # change placeholder to empty line
        line = re.sub(r'^  - "already_used_domain":', '', line.rstrip())
        print(line)
    """ remove empty lines created before instead of placeholder """

    # open mkdocs.yml
    with open(mkdconfpath) as filehandle:
        # read all lines
        lines = filehandle.readlines()
        # filter out every line with nothing in it ('strip'ed to nothing)
        lines = filter(lambda x: x.strip(), lines)
    # open mkdocs.yml for writing
    with open(mkdconfpath, 'w') as filehandle:
        # write filtered lines back to file
        filehandle.writelines(lines)

    # call logger
    debug_logger(request_user, " SYSTEM_MARKDOWN_ALL_SYSTEMS_END")
Example #20
File: is.py Project: MuYoul/infs
    # Create the git branch (jenkins)
    exec_command(["git", "checkout", "-b", proto_test_branch])

    # Apply valve ctl
    exec_command([
        "valved", "fetch", "--name", "java-mvn-springboot", "--overwrite",
        "--group sample", "--service ifsp"
    ])

    # Change the Jenkinsfile so that the proto branch is deployed
    import fileinput

    filename = "Jenkinsfile"
    text_to_search = "master"
    replacement_text = proto_test_branch
    with fileinput.FileInput(filename, inplace=True, backup='.bak') as file:
        for line in file:
            print(line.replace(text_to_search, replacement_text), end='')

    exec_command(["git", "add", "."])
    exec_command(["git", "commit", "-m", "auto generator by ifs proto type"])

    #git push
    exec_command(
        ["git", "push", "--set-upstream", "origin", proto_test_branch])

    # TODO: create the jenkins job (assume for now that it already exists)
    # Call the jenkins REST API
    # job_url="https://jenkins-devops.coruscant.opsnow.com/job/coruscant-samples/job/lemy-sample-java/job/proto_test_branch/build?token=11d76e49d3768fbceda674b237d8538290"
    jenkins = "https://jenkins-devops.coruscant.opsnow.com/job/coruscant-samples/job/lemy-sample-java/job/proto_test_branch/build?token=11d76e49d3768fbceda674b237d8538290"
Example #21
remote_latest_image = REMOTE_IMAGE_PREFIX + latest_image
call("docker tag " + latest_image + " " + remote_latest_image)

if args.deploy:
    call("docker push " + remote_versioned_image)

    if "SNAPSHOT" not in args.version:
        # do not push SNAPSHOT builds as latest version
        call("docker push " + remote_latest_image)

# Create the Helm chart resource
import fileinput

chart_yaml = "./helmchart/mlhub/Chart.yaml"
values_yaml = "./helmchart/mlhub/values.yaml"
with fileinput.FileInput(chart_yaml, inplace=True, backup='.bak') as file:
    for line in file:
        print(line.replace("$VERSION", str(args.version)), end='')

with fileinput.FileInput(values_yaml, inplace=True, backup='.bak') as file:
    for line in file:
        print(line.replace("$VERSION", str(args.version)), end='')

try:
    call("helm package ./helmchart/mlhub -d helmchart")
except:
    print("There was a problem with the helm command")

os.replace(f"{chart_yaml}.bak", chart_yaml)
os.replace(f"{values_yaml}.bak", values_yaml)
Example #22
import fileinput
import random

phonefile = open('Phones.txt', 'r')
phones = []
for lines in phonefile:
    # strip the newline so the phone number can be embedded inline
    phones.append(lines.strip())

with fileinput.FileInput('O_Customer.txt', inplace=True,
                         backup='.bak') as file:

    for line in file:
        phone1 = phones[random.randrange(0, 129)]
        phone2 = phones[random.randrange(0, 129)]
        print(line.replace("PHONES()",
                           ("PHONES(" + "\'" + str(phone1) + "\'" + ',' +
                            "\'" + str(phone2) + "\'" + ")")),
              end='')
Example #23
        try:
            page = urllib2.urlopen(url).read()
        except HTTPError as e:
            if e.code == 502:
                time.sleep(sleep_time)
                count += 1
                continue
            if retry >= 5:
                sys.exit(1)
        break

    soup = BeautifulSoup(page, features="html.parser")
    csvout = csv.writer(f)

    for table in soup.findAll('table'):
        for row in table.findAll('tr'):
            csvout.writerow([tr.text for tr in row.findAll('td')])
    f.close()
    f = open(output, 'a')

    for line in fileinput.FileInput(tmp_file, inplace=1):
        if line.rstrip():
            f.write(line)

    if "Next page" in page:
        offset += 100
        time.sleep(sleep_time)
        continue
    break

sys.exit(0)
Example #24
#!/usr/bin/python3
import os
import fileinput
import subprocess

acl_id = os.environ.get('ACL_ID')

if acl_id is not None:
    with fileinput.FileInput('/etc/fail2ban/action.d/aws-acl-action.conf',
                             inplace=True,
                             backup='.bak') as file:
        for line in file:
            print(line.replace('<acl-id>', acl_id), end='')
else:
    print('ERROR: please run the docker container with the ACL_ID environment variable set')
Example #25
                prop_dict['sh:or'].append({'sh:class': 'schema:' + allowed})

                if allowed == 'Text':
                    prop_dict['sh:or'].append({"sh:datatype": "xsd:string"})

                elif allowed == 'URL':
                    prop_dict['sh:or'].append({"sh:datatype": "xsd:anyURL"})
        else:
            prop_dict['sh:class'] = "schema:" + expected_types[0]

        specGraph['sh:property'].append(prop_dict)

    shacl_rules['sh:shapeGraph'].append(specGraph)

with open('bioschemas_shacl.json', 'w') as out:
    json.dump(shacl_rules, out, indent=4, separators=(',', ': '))

g = rdflib.Graph()
g.parse("bioschemas_shacl.json",
        format="json-ld",
        publicID="http://bioschemas.org/specifications/")

g.serialize(destination='shacl.ttl', format='turtle')

import fileinput

with fileinput.FileInput('shacl.ttl', inplace=True) as file:
    for line in file:
        print(line.replace('"', ''), end='')
Example #26
def main():
    mincols = 1
    upstream_pad = 0
    downstream_pad = 0

    options, args = doc_optparse.parse(__doc__)
    try:
        chr_col_1, start_col_1, end_col_1, strand_col_1 = parse_cols_arg(
            options.cols1)
        chr_col_2, start_col_2, end_col_2, strand_col_2 = parse_cols_arg(
            options.cols2)
        if options.mincols: mincols = int(options.mincols)
        pieces = bool(options.pieces)
        in1_gff_format = bool(options.gff1)
        in2_gff_format = bool(options.gff2)
        in_fname, in2_fname, out_fname = args
    except:
        doc_optparse.exception()

    # Set readers to handle either GFF or default format.
    if in1_gff_format:
        in1_reader_wrapper = GFFReaderWrapper
    else:
        in1_reader_wrapper = NiceReaderWrapper
    if in2_gff_format:
        in2_reader_wrapper = GFFReaderWrapper
    else:
        in2_reader_wrapper = NiceReaderWrapper

    g1 = in1_reader_wrapper(fileinput.FileInput(in_fname),
                            chrom_col=chr_col_1,
                            start_col=start_col_1,
                            end_col=end_col_1,
                            strand_col=strand_col_1,
                            fix_strand=True)
    if in1_gff_format:
        # Intersect requires coordinates in BED format.
        g1.convert_to_bed_coord = True
    g2 = in2_reader_wrapper(fileinput.FileInput(in2_fname),
                            chrom_col=chr_col_2,
                            start_col=start_col_2,
                            end_col=end_col_2,
                            strand_col=strand_col_2,
                            fix_strand=True)
    if in2_gff_format:
        # Intersect requires coordinates in BED format.
        g2.convert_to_bed_coord = True

    out_file = open(out_fname, "w")
    try:
        for feature in intersect([g1, g2], pieces=pieces, mincols=mincols):
            if isinstance(feature, GFFFeature):
                # Convert back to GFF coordinates since reader converted automatically.
                convert_bed_coords_to_gff(feature)
                for interval in feature.intervals:
                    out_file.write("%s\n" % "\t".join(interval.fields))
            elif isinstance(feature, GenomicInterval):
                out_file.write("%s\n" % "\t".join(feature.fields))
            else:
                out_file.write("%s\n" % feature)
    except ParseError as e:
        out_file.close()
        fail("Invalid file format: %s" % str(e))
Example #27
import fileinput, string, sys, re

for line in fileinput.FileInput(sys.argv[1], inplace=1):
    line = re.sub('[.,)(?"“”:;!]', r'', line, flags = re.M)
    line = re.sub("’s ", r' ', line, flags = re.M)
    line = re.sub("'s ", r' ', line, flags = re.M)
    line = re.sub("' s ", r' ', line, flags = re.M)
    line = re.sub(" – ", r' ', line, flags = re.M)
    line = re.sub("– ", r'', line, flags = re.M)
    line = re.sub("- ", r'', line, flags = re.M)
    line = re.sub(" -", r'', line, flags = re.M)
    line = re.sub("['’]", r'', line, flags = re.M)
    line = re.sub("[a-z-0-9]*/[a-z-0-9]*", r'', line, flags = re.M)
    line = re.sub("[A-Z]*[0-9]", r'', line, flags = re.M)
    line = re.sub("[0-9]", r'', line, flags = re.M)
    line = re.sub("  [ ]*", r' ', line, flags = re.M)
    sys.stdout.write(line)
Example #28
 def input(self):
     return fileinput.FileInput(files=self.files,
                                openhook=fileinput.hook_compressed)
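For context, fileinput.hook_compressed transparently decompresses files ending in .gz or .bz2 and opens everything else normally. A small self-contained sketch of the same pattern follows; the class name and file names are illustrative assumptions.
import fileinput

class LogReader:
    def __init__(self, files):
        self.files = files          # plain and/or compressed paths

    def input(self):
        # .gz and .bz2 inputs are decompressed on the fly
        return fileinput.FileInput(files=self.files,
                                   openhook=fileinput.hook_compressed)

reader = LogReader(['app.log', 'app.log.1.gz'])
with reader.input() as stream:
    for line in stream:
        pass  # process each line here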
Example #29
def configure_honeypot_replacement(DB_settings,
                                   datacenter_settings,
                                   old_hp_infos,
                                   new_hp_infos={},
                                   link=None):
    '''
    Configure servers to remove the old honeypot, or replace it with a new
    one, for a single link or for all links, by modifying the Nginx conf

    ARGUMENTS:
        DB_settings (json) : auth information
        datacenter_settings (json) : datacenter auth information
        old_hp_infos (dict) : old honeypot information subject to removal
            or replacement
        new_hp_infos (dict) - optional : new honeypot information for
            replacement
        link (dict) - optional : link information subject to reconfiguration

    Raises an error if something fails
    '''

    # Configure servers to replace the old honeypot with the new one for all
    # of its links
    if old_hp_infos != {} and new_hp_infos != {} and link is None:
        # Loop through all of the links using the old hp
        for link in old_hp_infos["links"]:
            # Variable declaration and initialisation to save servers and the
            # already modified configs
            already_update = []
            servers = []
            # Loop through all of the servers used by the link
            for server in link["servs"]:
                # Find the path of the configuration used by the server for the
                # link
                nginxRedirectionPath = "/data/template/" + \
                    str(link["link_id"]) + "-" + \
                    str(server["lhs_port"]) + ".conf"
                # If the configuration has not already been modified
                if not (nginxRedirectionPath in already_update):
                    # Modify the nginx conf to replace the old honeypot by the
                    # new one
                    with fileinput.FileInput(nginxRedirectionPath,
                                             inplace=True,
                                             backup='.bak') as file:
                        first_line = False
                        for line in file:
                            if ("  # " + str(old_hp_infos["hp_id"])) in line:
                                print(line.replace(str(old_hp_infos["hp_id"]),
                                                   str(new_hp_infos["hp_id"])),
                                      end='')
                                first_line = True
                            elif first_line:
                                print(line.replace(
                                    str(old_hp_infos["hp_port"]),
                                    str(new_hp_infos["hp_port"])),
                                      end='')
                                first_line = False
                            else:
                                print(line, end='')
                    # Add the config to the list of already updated configs
                    already_update.append(nginxRedirectionPath)
                # Prepare the deployment of the nginx config
                server["choosed_port"] = server["lhs_port"]
                servers.append(server)
            # Deploy the new Nginx config
            add_link.deploy_nginxConf(DB_settings, link["link_id"], servers)

    # Configure servers to replace the old honeypot with the new one for one
    # of its links
    elif old_hp_infos != {} and new_hp_infos != {} and link is not None:
        # Variable declaration and initialisation to save servers and the
        # already modified configs
        already_update = []
        servers = []

        # Finds the information of the servers used for the link, either
        # directly in the link, or in the information of the old honeypot
        interable_servs = link["servs"] if "servs" in link.keys(
        ) else old_hp_infos["servs"]

        # Loop through all of the servers used by the link
        for server in interable_servs:
            # Find the path of the configuration used by the server for the
            # link
            nginxRedirectionPath = "/data/template/" + \
                str(link["link_id"]) + "-" + str(server["lhs_port"]) + ".conf"
            # If the configuration has not already been modified
            if not (nginxRedirectionPath in already_update):
                # Modify the nginx conf to replace the old honeypot by the new
                # one
                with fileinput.FileInput(nginxRedirectionPath,
                                         inplace=True,
                                         backup='.bak') as file:
                    first_line = False
                    for line in file:
                        if ("  # " + str(old_hp_infos["hp_id"])) in line:
                            print(line.replace(str(old_hp_infos["hp_id"]),
                                               str(new_hp_infos["hp_id"])),
                                  end='')
                            first_line = True
                        elif first_line:
                            print(line.replace(str(old_hp_infos["hp_port"]),
                                               str(new_hp_infos["hp_port"])),
                                  end='')
                            first_line = False
                        else:
                            print(line, end='')
                # Add the config to the list of already updated configs
                already_update.append(nginxRedirectionPath)
            # Prepare the deployment of the nginx config
            server["choosed_port"] = server["lhs_port"]
            servers.append(server)
        # Deploy the new Nginx config
        add_link.deploy_nginxConf(DB_settings, link["link_id"], servers)

    # Configure servers to remove the old honeypot for one of its links
    elif old_hp_infos != {} and new_hp_infos == {} and link is not None:
        # Variable declaration and initialisation to save servers and the
        # already modified configs
        already_update = []
        servers = []

        # Finds the information of the servers used for the link, either
        # directly in the link, or in the information of the old honeypot
        interable_servs = link["servs"] if "servs" in link.keys(
        ) else old_hp_infos["servs"]

        # Loop through all of the servers used by the link
        for server in interable_servs:
            # Find the path of the configuration used by the server for the
            # link
            nginxRedirectionPath = "/data/template/" + \
                str(link["link_id"]) + "-" + str(server["lhs_port"]) + ".conf"
            # If the configuration has not already been modified
            if not (nginxRedirectionPath in already_update):
                # Modify the nginx conf to remove the old honeypot
                with fileinput.FileInput(nginxRedirectionPath,
                                         inplace=True,
                                         backup='.bak') as file:
                    first_line = False
                    for line in file:
                        if ("  # " + str(old_hp_infos["hp_id"])) in line:
                            print(line.replace(
                                "  # " + str(old_hp_infos["hp_id"]) + "\n",
                                ""),
                                  end='')
                            first_line = True
                        elif first_line:
                            print(line.replace(
                                "  server " +
                                str(datacenter_settings["hostname"]) + ":" +
                                str(old_hp_infos["hp_port"]) + ";\n", ""),
                                  end='')
                            first_line = False
                        else:
                            print(line, end='')
                # Add the config to the list of already updated configs
                already_update.append(nginxRedirectionPath)
            # Prepare the deployment of the nginx config
            server["choosed_port"] = server["lhs_port"]
            servers.append(server)
        # Deploy the new Nginx config
        add_link.deploy_nginxConf(DB_settings, link["link_id"], servers)

    # If none of the previous cases applies, the function was called with
    # incorrect arguments, so raise an error
    else:
        error = "Honeypot replacement configuration failed"
        logging.error(error)
        raise ValueError(error)
Example #30
import os
import fileinput

Lines = open("manual query generation.txt", "r").readlines()

with open("cleaned_sparql_queries.txt", "w") as write_file:
    for line in Lines:
        if line.strip():
            write_file.write(line)

Lines = open("cleaned_sparql_queries.txt", "r").readlines()

with open("cleaned_sparql_queries2.txt", "w") as write_file:
    for line in Lines:
        if not line[0].isdigit():
            write_file.write(line)

os.remove("cleaned_sparql_queries.txt")

with fileinput.FileInput("cleaned_sparql_queries2.txt", inplace=True, backup='.bak') as file:
    for line in file:
        if "which" in line.lower():
            print(line.replace(line, ""), end='')
        print(line.replace("-> PREFIX", "PREFIX"), end='')