def testNormal(self):
    """WriteLn honors Inc/DecIndent and supports call chaining."""
    with FileWriter(self.TEST_FILE) as writer:
        writer.WriteLn("a").IncIndent()
        writer.WriteLn("b").DecIndent().WriteLn("c")

    # Read the file back and check each line's indentation.
    with open(self.TEST_FILE) as handle:
        written = handle.readlines()
    self.assertEqual("a\n", written[0])
    self.assertEqual("\tb\n", written[1])
    self.assertEqual("c\n", written[2])
Example #2
0
File: Main.py  Project: daniel17903/gcmi2
def run_latency(warmup_time_s, upstream_ip, upstream_port, duration_s, ssl):
    """Run one latency measurement: start a switch, wait, stop, persist results."""
    # Single switch instance on a fixed DPID, one connection, non-throughput mode.
    switch = Switch(
        DPID("00:00:00:00:00:00:00:01"),
        warmup_time_s,
        upstream_ip,
        upstream_port,
        1,
        False,
        ssl,
    )
    switch.start()

    # Let the measurement run for the requested window.
    time.sleep(duration_s)
    print("stop!")

    switch.stop()

    # Persist the collected latency results.
    FileWriter().write_results_to_file(switch.get_results())
Example #3
0
 def __init__(self, initial_vaccinated, initial_infected, initial_healthy, virus, resultsfilename):
     """Set up the initial simulation counters and the results file writer."""
     self.virus = virus
     self.initial_vaccinated = initial_vaccinated
     self.initial_infected = initial_infected
     self.initial_healthy = initial_healthy
     # Population list is populated elsewhere; its size is the sum of all groups.
     self.population = []
     self.population_size = (
         initial_infected + initial_healthy + initial_vaccinated)
     self.total_dead = 0
     self.total_vaccinated = initial_vaccinated
     self.file_writer = FileWriter(resultsfilename)
    def testRemoveFromLastLine(self):
        """RemoveFromLastLine(n) trims n characters from the previous line."""
        with FileWriter(self.TEST_FILE) as writer:
            writer.WriteLn("abc")
            writer.WriteLn("def")
            # Drop the trailing character of "def".
            writer.RemoveFromLastLine(1)
            writer.WriteLn("123")

        with open(self.TEST_FILE) as handle:
            written = handle.readlines()
        self.assertEqual("abc\n", written[0])
        self.assertEqual("de\n", written[1])
        self.assertEqual("123\n", written[2])
Example #5
0
    def eventLogFile(self, eventLogFile):
        """Retrieves a single Event File Log and writes it to the appropriate directory

        Parameters
        ----------
        param: eventLogFile
            Salesforce EventLogFile record dict, ex:
            {
              'LogFileLength': 5199.0,
              'EventType': 'API',
              'LogDate': '2016-11-22T00:00:00.000+0000',
              'attributes': {
                'url': '/services/data/v32.0/sobjects/EventLogFile/0ATr00000000TWHGA2',
                'type': 'EventLogFile'
              },
              'LogFile': '/services/data/v32.0/sobjects/EventLogFile/0ATr00000000TWHGA2/LogFile',
              'Id': '0ATr00000000TWHGA2'
            }

        Returns
        -------
        requests.Response whose body is the csv event file log

        Raises
        ------
        ValueError
            If no access token has been set (run authenticate first).
        """
        if (self.accessToken == ''):
            raise ValueError(
                'accessToken has not been set, run authenticate method to set token'
            )
        # NOTE: a bare `exit` statement previously followed the raise; it was
        # unreachable dead code (and a no-op as a statement) and was removed.

        eventFileId = eventLogFile['Id']
        headers = {
            'Authorization': 'Bearer ' + self.accessToken,
            'X-PrettyPrint': '1',
            'Accept-Encoding': 'gzip'
        }
        # Fetch the raw CSV log body for this EventLogFile record.
        rawResponse = requests.get(
            'https://' + self.sfURL +
            '/services/data/v32.0/sobjects/EventLogFile/' + eventFileId +
            '/LogFile',
            headers=headers)

        if self.debug:
            # Parenthesized single-argument print works on Python 2 and 3;
            # the old `print x` statements were Python-2-only syntax.
            print("[DEBUG] eventLogFile >> ")
            print(rawResponse)
            print(rawResponse.content)

        # if self.log:
        #     w = FileWriter('log', eventFileId)
        #     w.writeFile(rawResponse.content)

        w = FileWriter(eventLogFile)
        w.writeFile(rawResponse.content)

        return rawResponse
Example #6
0
 def __build_dictionary(self):
     """Tokenize every document in self.data and persist the dictionary."""
     print('Building dictionary')
     dict_words = []
     total = len(self.data)
     # enumerate replaces the original hand-maintained counter (1-based).
     for step, text in enumerate(self.data, start=1):
         print(
             "FeatureExtraction.__build_dictionary(): Step {} / {}".format(
                 step, total))
         dict_words.append(NLP(text=text['content']).get_words_feature())
     FileWriter(
         filePath=Settings.DICTIONARY_PATH).store_dictionary(dict_words)
Example #7
0
def DriveScrape():
    """Scrape BookMyShow NCR: movie list, first-level details, shows/venues."""
    seedUrl = 'https://in.bookmyshow.com/national-capital-region-ncr/movies/'
    bookTicketsUrl = 'https://in.bookmyshow.com/buytickets/{}-national-capital-region-ncr/movie-ncr-{}-MT/{}'
    #bookTicketsUrl = 'https://in.bookmyshow.com/buytickets/{movieNameFromUrl}-national-capital-region-ncr/movie-ncr-{MovieId}}-MT/{TodaysDate}'

    # Stage 1: movie names and ids from the listing page.
    movie_list = GetMovieNameAndIdList(seedUrl).GetList()
    names, ids = movie_list[0], movie_list[1]
    FileWriter().WriteScapeBMSFirstTimeFileForListOfMovies(names, ids)

    # Stage 2: per-movie first-level details.
    movieInfoList = GetFirstLevelDetails(seedUrl, names).FillInMovieInfoDetails()
    FileWriter().WriteFirstLevelDetailsToCSV(movieInfoList, ids)

    # Stage 3: show timings and venue info per multiplex.
    showDetails = GetShowSeatsMultiPlexDetails(movieInfoList, ids,
                                               bookTicketsUrl)
    venueAndShowTimeInfoLists = showDetails.FillIntheShowAndMultiPlexDetails()

    writer = FileWriter()
    writer.WriteVenueList(venueAndShowTimeInfoLists[0], ids)
    writer.WriteShowTimeInfoList(venueAndShowTimeInfoLists[1], ids)
Example #8
0
File: Init.py  Project: Edakksm/RedditEndo
 def __init__(self):
     """Load bot settings from the configuration and wire up logging/output.

     Plain attribute access is used instead of the original direct
     `config.__getattr__('name')` calls — for a config object that resolves
     keys via __getattr__ the lookup result is the same, and calling a
     dunder directly is non-idiomatic.
     """
     config = ConfigParser()
     self.client_ID = config.client_id
     self.client_Secret = config.client_secret
     self.userName = config.username
     self.password = config.password
     self.subReddit = config.subreddit
     self.limit = config.limit
     self.logFileName = config.logfile
     self.userFileName = config.userfile
     self.start_duration = config.start_duration
     self.end_duration = config.end_duration
     self.pushShiftAPI = config.pushshiftapi
     # Logger and user-file writer are driven by the configured file names.
     self.logger = Logger(self.logFileName)
     self.fileWriter = FileWriter(self.userFileName)
def main():
    """CLI entry point: parse arguments, build the mesh, write .obj/material files."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-f",
                        "--file",
                        help="Please Enter the file name with -f  \n")
    parser.add_argument("-l",
                        "--label",
                        help="Please Enter the number of labels with -l \n",
                        type=int,
                        action=LabelValidator)

    # Read arguments from the command line
    args = parser.parse_args()

    # Guard clause: both arguments are required.
    if not (args.file and args.label):
        usage()
        sys.exit(2)

    # Pipeline: read -> voxelize -> label-encode -> extract faces -> write.
    meshGenerator = MeshGenerator()
    rawData = meshGenerator.readFile(args.file)
    processedData = meshGenerator.processData(rawData)
    encodedData, actualLabels = meshGenerator.faceMaker.encodeDataWithLabels(
        processedData, args.label)
    meshGenerator.makeFace(encodedData, actualLabels)

    # Emit the material file and the .obj geometry.
    writer = FileWriter()
    writer.writeMaterialFile(len(actualLabels))
    writer.writeObjectFile(meshGenerator.verticeList,
                           meshGenerator.faceList)

    print("Successful")
 def serialize(self):
     """Render the current project state to XML text and return it."""
     writer = FileWriter()
     writer.clear()
     writer.initXml()
     writer.writeLine(0, ['<data>'])
     # <info> header: data version, active region, all regions, project name.
     writer.writeLine(1, ['<info>'])
     writer.writeLine(2, ['<version>', DATA_VER, '</version>'])
     writer.writeLine(
         2, ['<activeregion>', Content.region, '</activeregion>'])
     writer.writeLine(2, ['<regions>'])
     for regionName in Content.allregions:
         writer.writeLine(3, ['<region>', regionName, '</region>'])
     writer.writeLine(2, ['</regions>'])
     writer.writeLine(2, ['<name>', Content.projectName, '</name>'])
     writer.writeLine(1, ['</info>'])
     # Body content is delegated to the content-specific writer.
     self._writeContent(writer, 1)
     writer.writeLine(0, ['</data>'])
     return writer.getContent()
def writeFeaturesToFile(sentence, filename):
    """POS-tag *sentence* and append CRF-style context features to *filename*."""
    fileWriter = FileWriter(OUTPUT_PATH, filename)
    tagged = nltk.pos_tag(word_tokenize(sentence))
    last = len(tagged) - 1
    for i, (word, tag) in enumerate(tagged):
        # Current word/tag feature.
        feature = 'I\tcw={} ct={}'.format(word, tag)
        # Previous word/tag, or BOS markers at the sentence start.
        if i == 0:
            feature += ' pw=BOS pt=BOS'
        else:
            feature += ' pw={} pt={}'.format(tagged[i - 1][0],
                                             tagged[i - 1][1])
        # Next word/tag, or EOS markers at the sentence end.
        if i == last:
            feature += ' nw=EOS nt=EOS'
        else:
            feature += ' nw={} nt={}'.format(tagged[i + 1][0],
                                             tagged[i + 1][1])

        fileWriter.writeToFile(feature)
    fileWriter.writeToFile("\n")

    print(tagged)
Example #12
0
 def test_it_writes_buble_sort_data_to_a_file(self):
     """Bubble-sort results are written through the overridden file handle."""
     # given a file path (joined properly; the original concatenated the cwd
     # and filename with no separator)
     filename = os.path.join(os.getcwd(), 'bubble_out.txt')
     # and a stringIO buffer standing in for the real file
     out = StringIO()
     # and the data
     sort_size = 10
     number_of_buckets = 2
     bubble_sort_time = 0.0071
     sorted_array = [17, 22, 30, 31, 33, 38, 51, 63, 83, 95]
     # and the FileWriter
     fw = FileWriter(filename)
     # override the file write with *out* so we can inspect the output.
     # BUG FIX: the original passed a fresh StringIO(), so the `out` buffer
     # read back below was always empty.
     fw.set_file_out(out)
     # write the data
     fw.set_sort_size(sort_size).set_number_buckets(
         number_of_buckets).set_bubble_sort_time(
             bubble_sort_time).set_out_array(sorted_array).write()
     # rewind and read the first line that was actually written
     out.seek(0)
     content = out.readline()
Example #13
0
    def __init__(self, master):
        """Build the Dialogue Editor main window and all of its panels.

        Statement order matters: Content.initData() must run before the
        panels are constructed, and refreshViews() is called last so every
        panel renders the freshly loaded data.
        """
        self.master = master

        # Basic window chrome and a 2x2 grid where cell (1,1) stretches.
        master.title("Dialogue Editor")
        master.geometry("1024x768")
        master.rowconfigure(1, weight=1)
        master.columnconfigure(1, weight=1)

        self._setupMenuBar(master)

        self.writer = FileWriter()

        # Load content and register this window to be refreshed on mutation.
        Content.initData()
        Content.mutateEvent.append(self.refreshViews)

        # Panels: top toolbar, tree view, text view, details view.
        self.toprow = TopRowMenu(master)
        self.paneltree = PanelTree(master)
        self.paneltext = PanelText(master)
        self.paneldetails = PanelDetails.PanelDetails(master)

        self.refreshViews()
Example #14
0
            # NOTE(review): fragment — the enclosing loops over caches/videos
            # start above this view.  Skip videos that no longer fit in the
            # cache, or that this cache already holds.
            if videoSize + filled > data.cacheCapacity:
                continue

            if cache in cacheContents and video in cacheContents[cache]:
                continue

            # Score the candidate: total latency saved across all endpoints
            # connected to this cache that request this video.
            improvement = 0
            for endpoint in data.endpoints:
                try:
                    lat = endpoint.cacheServers[cache]
                    rqn = endpoint.requests[video]
                    # TODO check the video isn't already in a closer cache
                    improvement += (endpoint.latency - lat) * rqn
                except KeyError:
                    # Endpoint not wired to this cache, or video not requested.
                    continue

            if improvement > score:
                bestVideo, score = video, improvement

        # No video improved the score: this cache is done.
        if bestVideo is None:
            break

        # Record the chosen video and account for its size.
        if cache in cacheContents:
            cacheContents[cache].append(bestVideo)
        else:
            cacheContents[cache] = [bestVideo]
        filled += data.videoSizes[bestVideo]

# Dump the final cache assignment in the contest output format.
writer = FileWriter("example.out")
writer.writeData(cacheContents)
Example #15
0
 def save_model(self, filePath):
     """Pickle the trained estimator to *filePath*."""
     writer = FileWriter(filePath=filePath)
     writer.save_pickle(obj=self.estimator)
# Detect faces in the captured frame (`img` is defined earlier in the script).
fd_obj = FaceDetector(img)
list_of_faces = fd_obj.detectFacesInImage()
#fd_obj.saveDetectedFaces(list_of_faces)

############################################################

print("############## FACE DETECTION COMPLETED !! ##################")
# Release the camera and close any OpenCV windows.
cam.release()
cv2.destroyAllWindows()

######## Using trained Classifier ##########################

# Feed the detected faces to the pre-trained KNN classifier to get the
# roll numbers of the students recognized as present.
ifk_obj = ImageFeederKNN()
ifk_obj.convertRawDataToTestData(list_of_faces)
predictions = ifk_obj.getPrediction(trained_pickle_name)
#print('The following roll numbers are present:')
#print(predictions)

####### FILE WRITER #################################

#predictions = [33,1,105,67]
# Course code comes from the first CLI argument; a missing argument raises
# IndexError, which is handled by printing usage (EAFP style).
try:
    course_code = str(sys.argv[1])
    course_code = course_code.upper()
    fw_obj = FileWriter(course_code)
    fw_obj.saveToFile(predictions)
except IndexError:
    print(
        "Specify course_code \n### USAGE ---> python AttendanceTaker.py <course_code> \n### replace <course_code> by CS-403 etc."
    )
# Build the initial search state from the seed cache assignment.
initial_score = 0
initial_state = TreeState(caches_contents=initial_contents, score=initial_score,
                          problem=problem)

# Generate the optimal end state
mcts = MCTS(tree_policy=UCB1(c=1.41),
            default_policy=immediate_reward,
            backup=monte_carlo)

node = StateNode(parent=None, state=initial_state)

# Repeatedly take the MCTS-best action until a terminal state is reached,
# writing the cleaned current solution to disk after every step so partial
# progress survives an interruption.
while True:
    if node.state.is_terminal():
        print("Terminal node reached.")
        break
    print("Finding best action")
    best_action = mcts(node)
    print("Performing action")
    # Re-root the search on the resulting state (parent deliberately dropped).
    node = StateNode(parent=None, state=node.state.perform(best_action))
    print("Score now is: %d" % node.state.score)

    print("Saving output")
    print(node.state.caches_contents)

    contents = node.state.caches_contents
    contents = _clean_solution(contents)
    # Keep only non-empty cache entries, keyed by cache index.
    dictionary = {i: e for i, e in enumerate(contents) if e}

    writer = FileWriter('output.txt')
    writer.writeData(dictionary)
Example #18
0
    # NOTE(review): fragment — the enclosing function (which supplies
    # `command` and `verbose`) starts above this view.
    parser = FileParser()
    var_signs, A, directions, bvals, max_or_min, cvals = parse_files(parser)

    # Check if the LP is valid.
    valid, msg = check_valid_lp(var_signs, A, directions, bvals, max_or_min,
                                cvals, verbose)
    if not valid:
        err_msg = msg if verbose else 'Invalid LP. Exiting.'
        exit_with_message(err_msg)

    # TODO(optional): Check if constraints are linearly indepdent. Should be pretty easy with NumPy.

    if command == 'dual':
        # Convert to the dual problem and print its model.
        d_var_signs, d_A, d_directions, d_bvals, d_max_or_min, d_cvals = convert_to_dual(
            var_signs, A, directions, bvals, max_or_min, cvals)
        writer = FileWriter()
        # Prints model to stdout.
        writer.write_model_to_file(d_var_signs, d_A, d_directions, d_bvals,
                                   d_max_or_min, d_cvals)
    elif command == 'solve':
        # Solve via two-phase simplex on the standard-form LP.
        model = StandardFormLP(var_signs, A, directions, bvals, max_or_min,
                               cvals)
        if model.are_dependent_constraints():
            print('Dependent constraints. Exiting.')
            exit()
        solver = TwoPhaseSimplex(verbose=verbose)
        solution, obj, x = solver.solve(model)
        if solution == 'Infeasible':
            print('Infeasible problem. Exiting.')
        elif solution == 'Unbounded':
            print('Unbounded problem. Exiting.')
Example #19
0
 def __init__(self):
     """Map the working directory, then substitute flags in its files."""
     print('Mapping directory.')
     # Build the directory map first; substitute_flags() depends on it.
     self.directory = self.map_directory()
     self.fileWriter = FileWriter()
     self.substitute_flags()
Example #20
0
from FileImporter import FileImporter
from FileWriter import FileWriter
from os.path import basename

if __name__ == '__main__':
    # begin execution

    if len(sys.argv) < 3:
        print("Not enough parameters")
        exit(1)

    # CLI: <bucket_size> <input_file>; output name encodes both.
    bucket_size = int(sys.argv[1])
    input_file_name = str(sys.argv[2])
    output_file_name = "owens-" + \
        os.path.splitext(basename(input_file_name))[
            0] + "-" + str(bucket_size) + ".txt"
    fw = FileWriter(output_file_name)
    fi = FileImporter(input_file_name)
    arr = fi.get_array()
    start_times = []
    end_times = []
    # Time three sorting runs.  time.clock() was deprecated in 3.3 and
    # removed in Python 3.8; perf_counter() is its documented replacement.
    for _ in range(3):
        start_times.append(time.perf_counter())
        ret = bucket_sort(QuickSort, arr, bucket_size)
        end_times.append(time.perf_counter())

    # Total elapsed time across the three runs.
    quick_sort_time = sum(end_times) - sum(start_times)
    fw = fw.set_number_buckets(bucket_size).set_sort_size(
        len(ret)).set_quick_sort_time(quick_sort_time)
    fw.set_out_array(ret).write()
Example #21
0
import Settings
from DataLoader import DataLoader
from FileWriter import FileWriter
from NLP import NLP
from FileReader import FileReader
from FeatureExtraction import FeatureExtraction
from Classifier import Classifier
import pickle as cPickle

if __name__ == '__main__':
    # NOTE(review): the tail of this script is cut off in this view; the
    # commented training/evaluation section continues past it.

    # Get from data/test files and store as json
    json_train = DataLoader(dataPath=Settings.DATA_TRAIN_PATH).get_json()
    json_test = DataLoader(dataPath=Settings.DATA_TEST_PATH).get_json()
    FileWriter(filePath=Settings.DATA_TRAIN_JSON, data=json_train).store_json()
    FileWriter(filePath=Settings.DATA_TEST_JSON, data=json_test).store_json()

    # Load from json files
    train_loader = FileReader(filePath=Settings.DATA_TRAIN_JSON)
    test_loader = FileReader(filePath=Settings.DATA_TEST_JSON)

    ######################  Train  ######################
    # data_train = train_loader.read_json()
    # data_test = test_loader.read_json()

    # FeatureExtraction(data=data_train).load_dict()
    # features_train, labels_train = FeatureExtraction(data=data_train).get_data_and_label()
    # features_test, labels_test = FeatureExtraction(data=data_test).get_data_and_label()

    # est = Classifier(features_train=features_train, features_test=features_test, labels_train=labels_train, labels_test=labels_test)