Example #1
 def n_return_if_stmt(self, node):
     if self.__params['isLambda']:
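         # a lambda body is a bare expression, so no 'return' keyword is emitted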
         self.preorder(node[0])
         self.prune()
     else:
         self.write(self.indent, 'return')
         if self.return_none or node != AST('return_stmt', [AST('ret_expr', [NONE]), Token('RETURN_END_IF')]):
             self.write(' ')
             self.preorder(node[0])
         self.print_()
         self.prune() # stop recursing
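
Both branches end in prune(), which stops SPARK's GenericASTTraversal from
descending into the current node's children. A minimal sketch of that
mechanism, assuming simplified stand-ins (PruningException, MiniNode and
MiniTraversal are illustrative names, not SPARK's actual API):

    # Sketch of the prune() mechanism: the handler raises, the walker
    # catches, and the children of the current node are skipped.
    class PruningException(Exception):
        pass

    class MiniNode(object):
        def __init__(self, type_, kids=()):
            self.type = type_
            self.kids = list(kids)

    class MiniTraversal(object):
        def preorder(self, node):
            handler = getattr(self, 'n_' + node.type, self.default)
            try:
                handler(node)
            except PruningException:
                return              # handler called prune(): skip children
            for kid in node.kids:   # otherwise keep walking
                self.preorder(kid)

        def prune(self):
            raise PruningException

        def default(self, node):
            pass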
Example #2
 def n_delete_subscr(self, node):
     n = node[-2][0]
     if n == 'build_list' and n[-1].type.startswith('BUILD_TUPLE'):
         if n[-1] != 'BUILD_TUPLE_0':
             n.type = 'build_tuple2'
     elif n == 'LOAD_CONST' and isinstance(n.pattr, tuple) and len(n.pattr) > 0:
         node[-2][0] = AST('build_tuple2',
                           [Token('LOAD_CONST', None, x) for x in n.pattr] + [None])
     self.default(node)
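
The comparisons n == 'build_list' and n == 'LOAD_CONST' above read oddly at
first, since they compare a node against a plain string. A minimal sketch of
the convention that makes this work, assuming a simplified stand-in for the
real Token class (which carries more state):

    # Sketch: equality against a str is assumed to match the node's type
    # name, so handlers can pattern-match on opcode names directly.
    class MiniToken(object):
        def __init__(self, type_, pattr=None):
            self.type = type_
            self.pattr = pattr

        def __eq__(self, other):
            if isinstance(other, str):
                return self.type == other   # 'LOAD_CONST' matches by type
            return self is other

    t = MiniToken('LOAD_CONST', pattr=(1, 2))
    assert t == 'LOAD_CONST'
    assert isinstance(t.pattr, tuple) and len(t.pattr) > 0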
Example #3
 def n_yield(self, node):
     self.write('yield')
     if node != AST('yield', [NONE, Token('YIELD_VALUE')]):
         self.write(' ')
         self.preorder(node[0])
     self.prune()  # stop recursing
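
The inequality on the second line matches the degenerate 'yield None': when
the node equals the template, only the bare keyword is written, and any
other operand falls through to preorder(node[0]). A sketch of the recursive
structural equality this relies on (a simplified stand-in, not the
project's actual AST class):

    # Sketch: whole-subtree comparison recurses over type and children.
    class MiniAST(object):
        def __init__(self, type_, kids):
            self.type = type_
            self.kids = kids

        def __eq__(self, other):
            if isinstance(other, MiniAST):
                return (self.type == other.type
                        and len(self.kids) == len(other.kids)
                        and all(a == b for a, b in zip(self.kids, other.kids)))
            return self.type == other      # fall back to the type name

    a = MiniAST('yield', [MiniAST('expr', []), MiniAST('YIELD_VALUE', [])])
    b = MiniAST('yield', [MiniAST('expr', []), MiniAST('YIELD_VALUE', [])])
    assert a == b
    assert not (a == MiniAST('yield', [MiniAST('expr', [])]))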
Example #4
import sys, re, cStringIO
from types import ListType, TupleType, DictType, \
     EllipsisType, IntType, CodeType

from spark import GenericASTTraversal
import Parser
from Parser import AST
from Scanner import Token, Code

minint = -sys.maxint - 1

# Some ASTs used for comparing code fragments (like 'return None' at
# the end of functions).

RETURN_LOCALS = AST(
    'return_stmt',
    [AST('expr', [Token('LOAD_LOCALS')]),
     Token('RETURN_VALUE')])

NONE = AST('expr', [Token('LOAD_CONST', pattr=None)])

RETURN_NONE = AST('stmt', [AST('return_stmt', [NONE, Token('RETURN_VALUE')])])

PASS = AST('stmts', [AST('sstmt', [AST('stmt', [AST('passstmt', [])])])])

ASSIGN_DOC_STRING = lambda doc_string: \
    AST('stmt',
        [AST('assign',
             [AST('expr', [Token('LOAD_CONST', pattr=doc_string)]),
              AST('designator', [Token('STORE_NAME', pattr='__doc__')])])])
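
These templates let handler methods recognize compiler boilerplate with a
single structural comparison instead of opcode-by-opcode checks. A hedged
illustration of the intended use (the handler name and surrounding class
are assumptions, not code from this module):

    # Hypothetical handler: one comparison against RETURN_NONE recognizes
    # the implicit 'return None' the compiler appends to a function body.
    def n_stmt(self, node):
        if not self.return_none and node == RETURN_NONE:
            self.prune()            # suppress the implicit trailing return
        self.default(node)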
Example #5
    def extract_hyperparameters(self, filename, module):
        astObj = AST()
        hyperParams = astObj.ParseAst(filename, self.project_title)
        # Extract hyperparameter metadata parsed from the model file.
        for param in hyperParams:
            if 'epochs' in param:
                self.experiment_metadata['epochs'] = param.split('=')[1]
            if 'batch_size' in param:
                self.experiment_metadata['batch_size'] = param.split('=')[1]

        #Adding Callbacks and Checkpoint Code Dynamically

        flag = 1  # cleared once the callback imports have been injected
        temp = module
        with open(filename) as myfile:
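            # bail out if this script has already been instrumented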
            if 'CSVLogger' in myfile.read():
                return
            temp_file_location = get_python_lib()
            temp_file_location = temp_file_location.replace('\\', '/')
            with open(temp_file_location + '/temp.py', 'w') as file:
                # getsourcelines() returns (list_of_source_lines, start_lineno);
                # only the list element is rewritten below
                source_lines = inspect.getsourcelines(temp)
                for code in source_lines:
                    if isinstance(code, list):
                        for line in range(0, len(code)):
                            # inject the callback imports ahead of the first original import
                            if 'import' in code[line] and flag == 1:
                                file.write('from keras.callbacks import CSVLogger, ModelCheckpoint\n')
                                file.write(code[line])  # source lines already end with '\n'
                                flag = 0
                            elif '.fit(' in code[line]:
                                csv_file_path = temp_file_location + '/training.log'
                                file.write("csv_logger = CSVLogger('" +
                                           csv_file_path + "')" + '\n')
                                weights_file_path = temp_file_location + '/weights-improvement-{epoch:02d}-{acc:.2f}.hdf5'
                                file.write('filepath="' + weights_file_path + '"\n')
                                file.write("checkpoint = ModelCheckpoint(filepath, monitor='acc', "
                                           "verbose=1, save_best_only=False, mode='max')\n")
                                file.write('callbacks_list = checkpoint\n')
                                code[line] = code[line].replace(
                                    ')',
                                    ',callbacks=[csv_logger, callbacks_list])')
                                file.write(code[line])
                            else:
                                file.write(code[line])

        #Executing the temp file
        os.system(sys.executable + ' ' + temp_file_location + '/temp.py')

        # sorted(): glob order is arbitrary, but the index below assumes
        # the checkpoints are ordered by epoch
        model_file = sorted(glob.glob(temp_file_location + '/*.hdf5'))
        epochs = int(self.experiment_metadata['epochs'])

        #Loading model and parameters
        model = load_model(model_file[epochs - 1])
        self.experiment_metadata['input_shape'] = model.input_shape
        self.experiment_metadata['layers_count'] = len(model.layers)
        self.experiment_metadata['output_shape'] = model.output_shape
        self.experiment_metadata[
            'Optimizer'] = model.optimizer.__class__.__name__
        self.experiment_metadata['LossFunction'] = model.loss

        #Model and Log file
        self.experiment_metadata[
            'callbacks_log'] = temp_file_location + '/training.log'
        self.experiment_metadata['model_file'] = model_file[epochs - 1]

        #Generate Predict Function
        # output = self.generate_predict(model)
        # with open(temp_file_location + '/auto_predict.py', 'w') as file:
        #     file.write(output)

        self.experiment_metadata[
            'predict_function'] = temp_file_location + '/auto_predict.py'
        print(self.experiment_metadata)

        self.saveToLocalDB(self.experiment_metadata)
        #Python_file, Metadata_dict(), project name
        #self.saveToDB(filename, self.experiment_metadata, self.project_title)

        #Deleting all the files that are generated by ModelKB
        os.remove(temp_file_location + '/temp.py')
        os.remove(temp_file_location + '/training.log')
        # auto_predict.py is only written by the commented-out block above,
        # so guard against it being absent
        if os.path.exists(temp_file_location + '/auto_predict.py'):
            os.remove(temp_file_location + '/auto_predict.py')
        for f in model_file:
            os.remove(f)
        sys.exit(0)
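
The heart of the instrumentation is the str.replace on the '.fit(' line,
together with the CSVLogger/ModelCheckpoint preamble written just before
it. Isolated and runnable, the rewrite looks like this; note that replace
substitutes at every ')' on the line, so the script's .fit(...) call must
be the only parenthesized expression on its line:

    # Isolated sketch of the .fit( rewrite performed by the loop above.
    line = 'model.fit(X_train, y_train, epochs=5)\n'
    line = line.replace(')', ',callbacks=[csv_logger, callbacks_list])')
    assert line == ('model.fit(X_train, y_train, epochs=5'
                    ',callbacks=[csv_logger, callbacks_list])\n')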