def __init__(self, root_dir, idxs, transform=None):
    """Build a dataset view over the rows of final.csv selected by idxs.

    Args:
        root_dir (string): Directory with all the images.
        idxs (array-like): Integer row labels (e.g. one fold produced by
            StratifiedKFold) selecting which annotation rows to use.
        transform (callable, optional): Optional transform to be applied
            on a sample.
    """
    # NOTE(review): hard-coded absolute path — consider accepting the csv
    # path as a parameter instead of baking in this machine's layout.
    final = pd.read_csv(
        '/home/cytuser/code/panorama/pretrained-models.pytorch-master/classification_data/final.csv'
    )
    # `classes` presumably maps the raw label to an integer class id —
    # TODO confirm against its definition. (The redundant
    # `lambda x: classes(x)` wrapper is dropped; apply passes one value.)
    final['label'] = final['label'].apply(classes)
    # Keep only the requested fold rows and renumber them 0..len-1 so
    # positional indexing in __getitem__ works.
    self.img_infos = final.loc[idxs].reset_index()
    self.root_dir = root_dir
    self.transform = transform
def __init__(self):
    """Build one CLASS.classes definition per playable class.

    Every roster entry is attached as an instance attribute
    (self.fighter, self.mage, ...), exactly as if each had been
    assigned individually.
    """
    # (attribute, class id, hit die, stat requirements, group, warrior?)
    roster = [
        # Warriors — d10, group [1, 1], with Con-HP and Str bonuses.
        ('fighter',     FIGHTER,     10, {STR: 9},                             [1, 1], True),
        ('paladin',     PALADIN,     10, {STR: 12, CON: 9, WIS: 13, CHA: 17},  [1, 1], True),
        ('ranger',      RANGER,      10, {STR: 13, DEX: 13, CON: 14, WIS: 14}, [1, 1], True),
        # Wizards — d4, group [1, 4].
        ('mage',        MAGE,        4,  {INT: 9},                             [1, 4], False),
        ('abjurer',     ABJURER,     4,  {INT: 9, WIS: 15},                    [1, 4], False),
        ('conjurer',    CONJURER,    4,  {INT: 9, CON: 15},                    [1, 4], False),
        ('diviner',     DIVINER,     4,  {INT: 9, WIS: 16},                    [1, 4], False),
        ('enchanter',   ENCHANTER,   4,  {INT: 9, CHA: 16},                    [1, 4], False),
        ('illusionist', ILLUSIONIST, 4,  {INT: 9, DEX: 16},                    [1, 4], False),
        ('invoker',     INVOKER,     4,  {INT: 9, CON: 16},                    [1, 4], False),
        ('necromancer', NECROMANCER, 4,  {INT: 9, WIS: 16},                    [1, 4], False),
        ('transmuter',  TRANSMUTER,  4,  {INT: 9, DEX: 15},                    [1, 4], False),
        # Clerics — d8, group [2, 3].
        ('priest',      PRIEST,      8,  {WIS: 9},                             [2, 3], False),
        ('druid',       DRUID,       8,  {WIS: 9},                             [2, 3], False),
        # Rogues — d6, group [1, 2].
        ('bard',        BARD,        6,  {DEX: 12, INT: 13, CHA: 15},          [1, 2], False),
        ('thief',       THIEF,       6,  {DEX: 9},                             [1, 2], False),
    ]
    for attr, class_id, hit_die, reqs, grp, warrior in roster:
        if warrior:
            definition = CLASS.classes(class_id, hit_die, reqs, grp,
                                       bonusConHP=True, bonusStr=True)
        else:
            definition = CLASS.classes(class_id, hit_die, reqs, grp)
        setattr(self, attr, definition)
def main():
    """Train se_resnet50 (rewired for 1-channel input, 596 classes) with
    stratified 5-fold splits, checkpointing the best prec@1.

    Reads the global argparse `args` and updates the global `best_prec1`.
    """
    global args, best_prec1
    args = parser.parse_args()

    # Build the stratified 5-fold split once, up front, over the labels.
    final = pd.read_csv(
        '/home/cytuser/code/panorama/pretrained-models.pytorch-master/classification_data/final.csv'
    )
    final['label'] = final['label'].apply(classes)
    y = final['label'].values
    skf = StratifiedKFold(n_splits=5, shuffle=True)
    kfold = [[train_index, test_index] for train_index, test_index in skf.split(y, y)]

    # Create the model: ImageNet-pretrained SE-ResNet50 with its stem
    # rewired for single-channel (grayscale) input and a fresh 596-way head.
    print("=> creating model '{}'".format(args.arch))
    model = pretrainedmodels.__dict__['se_resnet50'](num_classes=1000,
                                                     pretrained='imagenet')
    n_inputs = model.last_linear.in_features
    model.layer0.conv1 = nn.Conv2d(1, 64, kernel_size=(7, 7), stride=(2, 2),
                                   padding=(3, 3), bias=False)
    model.last_linear = nn.Sequential(
        OrderedDict([('classifier', nn.Linear(n_inputs, 596))]))

    # Optionally resume from a checkpoint.
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_epoch = checkpoint['epoch']
            best_prec1 = checkpoint['best_prec1']
            model.load_state_dict(checkpoint['state_dict'])
            print("=> loaded checkpoint '{}' (epoch {})".format(
                args.resume, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))

    cudnn.benchmark = True

    # Data loading code.
    traindir = os.path.join(args.data, 'classification_data')

    # Grayscale tensors normalized to roughly [-1, 1].
    transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize([0.5], [0.5]),
    ])

    # Loss and optimizer; model is wrapped for multi-GPU after the
    # optimizer is built over its parameters.
    criterion = nn.CrossEntropyLoss().cuda()
    optimizer = torch.optim.SGD(model.parameters(), args.lr,
                                momentum=args.momentum,
                                weight_decay=args.weight_decay)
    model = torch.nn.DataParallel(model).cuda()

    for epoch in range(args.start_epoch, args.epochs):
        adjust_learning_rate(optimizer, epoch)

        # NOTE(review): a fold is re-drawn at random every epoch, so the
        # "validation" rows change per epoch and overlap rows trained on in
        # earlier epochs — reported prec@1 is optimistic. Confirm intended.
        i = np.random.choice(5)
        train_idx, val_idx = kfold[i][0], kfold[i][1]

        train_loader = torch.utils.data.DataLoader(
            Mydataset(traindir, train_idx, transform),
            batch_size=args.batch_size, shuffle=True,
            num_workers=args.workers, pin_memory=True)
        val_loader = torch.utils.data.DataLoader(
            Mydataset(traindir, val_idx, transform),
            batch_size=args.batch_size, shuffle=False,
            num_workers=args.workers, pin_memory=True)

        # Train for one epoch.
        train(train_loader, model, criterion, optimizer, epoch)

        # Evaluate once per epoch. (The original re-ran validate() a second
        # time every 5th epoch, which was redundant; the unused `scale`
        # variable and dead commented-out code are also removed.)
        prec1 = validate(val_loader, model, criterion)

        # Remember best prec@1 and save checkpoint.
        is_best = prec1[0] > best_prec1
        best_prec1 = max(prec1[0], best_prec1)
        save_checkpoint(
            {
                'epoch': epoch + 1,
                'arch': args.arch,
                'state_dict': model.state_dict(),
                'best_prec1': best_prec1,
            }, is_best)
# Demo script: build two student records and report whether the first
# one made the honor roll.
from classes import classes

# Arguments look like (name, major, gpa, flag) — the flag's meaning is
# not visible here; confirm against the classes definition.
student1 = classes("Oscar", "Accounting", 3.1, False)
student2 = classes("Kevin", "IT", 3.8, False)

print(student1.on_honor_roll())
import sys
import numpy as np
from metrics import metrics
from evaluation import evaluate
from scoring import score
from classes import classes
from math import floor
import itertools
import matplotlib.pyplot as plt
import operator
from operator import itemgetter, attrgetter
import os

# Links ObsT+PredT+Obs.
times = {}
# Links Pred.
predtimes = {}

pairclasses = classes()
pairclasses._classthreshold = 5
# NOTE(review): this rebinds the imported `metrics` name to an instance,
# shadowing the class itself for the rest of the module.
metrics = metrics()

# Feature switches.
Extract = False
OnePred = False
threshold = True
UPGMA = False
UPGMAINV = False
UPGMASIZE = False

# Step budgets for the search phases.
RENbstep = 1000   # random exploration without classes
REPNbstep = 1000  # random exploration with classes
GDMaxstep = 1000  # gradient descent without classes
# Minimal example of a user-defined data type:
#   `from FILE import CLASS` brings the class into scope,
#   an instance object carries the data,
#   and `self` inside the class refers to that particular instance.
from classes import classes

student1 = classes("Jim", "Business", 3.1, False)
print(student1.name)
def importClasses(school_id, branch_id, term=False): try: classList = classesApi.classes( { "school_id": school_id, "branch_id": branch_id }, term) db = database.db if classList is None: error.log(__file__, False, "Unknown Object") return if not "status" in classList: error.log(__file__, False, "Unknown Object") return if classList["status"] == "ok": for classObject in classList["classes"]: unique = { "school_id": classObject["school_id"], "branch_id": classObject["branch_id"], "class_id": classObject["class_id"] } names = [] names.append({ "term": str(classList["term"]["value"]), "name": unicode(str(classObject["name"]).decode("utf8")) }) existsing = db.classes.find(unique).limit(1) if existsing.count() > 0: existsing = existsing[0] if "names" in existsing: if "names" in existsing: for row in existsing["names"]: if not row["term"] == classList["term"][ "value"]: names.append(row) element = { "names": names, "school_id": classObject["school_id"], "branch_id": classObject["branch_id"], "class_id": classObject["class_id"], "type": classObject["type"] } status = sync.sync(db.classes, unique, element) if sync.check_action_event(status) == True: for url in sync.find_listeners('class', unique): sync.send_event(url, status["action"], element) for url in sync.find_listeners('school', { "school": school_id, "branch_id": branch_id }): sync.send_event(url, "class", element) for url in sync.find_general_listeners('class_general'): sync.send_event(url, status["action"], element) # Launch class_members scraper # Launch class teams else: if "error" in classList: error.log(__file__, False, classList["error"]) else: error.log(__file__, False, "Unknown Error") except Exception, e: print str(e) error.log(__file__, False, str(e))