Example #1
 def test_load_data(self):
     data = load_data(self.organisation)
     assert_equals(sorted(data.keys()), sorted(['pull_requests',
         'pull_requests_per_project',
         'pull_request_comments',
         'pull_request_comments_per_project',
         'projects',
         'projects_with_pulls']))
Example #2
 def test_load_data(self):
     data = load_data(self.organisation)
     assert_equals(
         sorted(data.keys()),
         sorted([
             'pull_requests', 'pull_requests_per_project',
             'pull_request_comments', 'pull_request_comments_per_project',
             'projects', 'projects_with_pulls'
         ]))
Example #3
def add_deal(agent_obj):
    username = agent_obj.username
    profiles_data = load_data(PROFILES_FILE_PATH)
    # find the profiles stored under this agent's username
    profiles_found = None
    for profile in profiles_data:
        if username in profile:
            profiles_found = profile[username]
            break
    if not profiles_found:
        print('No profiles found for {}'.format(username))
        return
    print(profiles_found)
    id_selected = input('Please select your profile for the deal (enter id): ')
    for profile_found in profiles_found:
        if profile_found['id'] == id_selected:
            deal = Agent.add_deal(agent_obj)
            data = [{
                'agent_name': agent_obj.username,
                'profile_id': id_selected,
                'deal': deal
            }]
            save_to_file(DEALS_FILE_PATH, data)
            print('Added deal successfully')
            agent()
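
Example #3 above (and Examples #7 and #8 below) passes a file path constant to load_data and persists new records with save_to_file. Those helpers are not shown on this page; as a rough sketch, assuming they are simple JSON-file wrappers (the append behaviour here is an assumption, not these projects' actual code), they could look like this:

import json
import os

def load_data(file_path):
    # Return the list stored in the JSON file, or an empty list if the file does not exist yet.
    if not os.path.exists(file_path):
        return []
    with open(file_path) as f:
        return json.load(f)

def save_to_file(file_path, data):
    # Append the new records to whatever is already stored in the file.
    existing = load_data(file_path)
    existing.extend(data)
    with open(file_path, 'w') as f:
        json.dump(existing, f, indent=2)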
Example #4
import os

import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

import store  # project-local module that provides load_data()
from dA import dA

try:
    import PIL.Image as Image
except ImportError:
    import Image

learning_rate = 0.1
training_epochs = 100
batch_size = 20
output_folder = 'dA_plots'

print('... loading data')
datasets = store.load_data()

train_set_x, train_set_y = datasets[0]

# compute number of minibatches for training, validation and testing
n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size

# start-snippet-2
# allocate symbolic variables for the data
index = T.lscalar()  # index to a [mini]batch
x = T.matrix('x')  # the data is presented as rasterized images
# end-snippet-2

if not os.path.isdir(output_folder):
    os.makedirs(output_folder)
os.chdir(output_folder)
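
This snippet relies on a project-local store module whose load_data() returns the datasets as a list of (x, y) pairs of Theano shared variables, in the style of the Deep Learning Tutorials' MNIST loader. A minimal sketch of that convention (the helper name and exact behaviour are assumptions, not part of this example) might be:

import numpy
import theano
import theano.tensor as T

def shared_dataset(data_xy, borrow=True):
    # Wrap a (features, labels) pair of numpy arrays in Theano shared variables
    # so minibatches can be sliced on the device without repeated host transfers.
    data_x, data_y = data_xy
    shared_x = theano.shared(numpy.asarray(data_x, dtype=theano.config.floatX),
                             borrow=borrow)
    shared_y = theano.shared(numpy.asarray(data_y, dtype=theano.config.floatX),
                             borrow=borrow)
    # Labels are used as integer indices, so cast the shared float array to int32.
    return shared_x, T.cast(shared_y, 'int32')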
Example #5
 def test_update_data(self):
     data = load_data('github')
     assert data
Example #6
 def seed_data(self):
     self.organisation = 'yola'
     load_data(self.organisation)
Example #7
 def search():
     return load_data(PROFILES_FILE_PATH)
Example #8
def check_agent_username(username):
    agents_data = load_data(AGENTS_FILE_PATH)
    # instantiate Agent objects from the stored records so the lookup below can find them
    _ = [Agent(**d) for d in agents_data]
    agent = Supervisor.search_username(username)
    return agent