Example #1
 def deploy_cloud_roles(self, roles):
     pool = ThreadPool(4)
     results = pool.map(self.deploy_role, roles)
     pool.close()
     pool.join()
     return results
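All of these snippets share the same skeleton: build a multiprocessing.dummy.Pool (a thread pool that exposes the multiprocessing.Pool API), map a callable over an iterable, then close and join. A minimal self-contained sketch of that skeleton, with a made-up word_length worker standing in for deploy_role:

from multiprocessing.dummy import Pool as ThreadPool

def word_length(word):
    return len(word)

pool = ThreadPool(4)                                 # 4 worker threads
results = pool.map(word_length, ["a", "bb", "ccc"])  # blocks until every task finishes
pool.close()                                         # no more tasks will be submitted
pool.join()                                          # wait for the workers to exit
print(results)                                       # [1, 2, 3]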
Example #2
import skimage.transform
import numpy as np
import time
import os
import pdb
import cPickle as pickle  # Python 2 only; plain "import pickle" on Python 3
import random
from scipy import ndimage
from utils import *
from bleu import evaluate, evaluate_captions_ciderD, evaluate_for_particular_captions
from multiprocessing.dummy import Pool as ThreadPool
import sys
reload(sys)                      # Python 2 only: make setdefaultencoding visible again
sys.setdefaultencoding('utf-8')  # force UTF-8 as the process-wide default encoding

pool = ThreadPool(10)
eps = 1e-10


def names2data(features, names):
    data = np.asarray(pool.map(lambda x: features[x][:], names))
    return data
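Because multiprocessing.dummy backs the pool with threads rather than processes, the mapped callable is never pickled, which is why the lambda above works; the same lambda handed to multiprocessing.Pool would raise a PicklingError. A small sketch of the idea, using a plain dict of arrays as a stand-in for the features container (likely an h5py file in the original):

from multiprocessing.dummy import Pool as ThreadPool
import numpy as np

features = {"img1": np.zeros(3), "img2": np.ones(3)}  # stand-in for the real feature store
pool = ThreadPool(2)
data = np.asarray(pool.map(lambda name: features[name][:], ["img1", "img2"]))
pool.close()
pool.join()
print(data.shape)  # (2, 3)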


class CaptioningSolver(object):
    def __init__(self, model, data, val_data, **kwargs):
        """
        Required Arguments:
            - model: Show Attend and Tell caption generating model
            - data: Training data; dictionary with the following keys:
                - features: Feature vectors of shape (82783, 196, 512)
Example #3
print "初始化解析,用时:%.1f秒" % (end - begin)

def countFinish():
    while True:
        print len(finish)
        time.sleep(5)


def download(item):
    time.sleep(random.randint(1,3))
    link, package_path = item
    try:
        r = requests.get(link, timeout=(20,120))
        if r.status_code == 200:
            with open(os.path.join(package_path, link.split("/")[-1]), "wb") as f:
                f.write(r.content)
            finish.append(1)
        else:
            print "下载失败:", link, r.status_code, r.reason
    except Exception as e:
        print "下载失败:", link, str(e)


if __name__ == "__main__":
    t = Thread(target=countFinish)
    t.setDaemon(True)
    t.start()

    pool = ThreadPool(3)
    pool.map(download, source_list[2429:])
Example #4
                            action='store_true',
                            help='pass LM hash as password')
    parser.add_argument('-d',
                        '--domain',
                        default='.',
                        help='domain. default is local')
    parser.add_argument('-w',
                        '--threads',
                        type=int,
                        default=1,
                        help='default 1')
    parser.add_argument('-t',
                        '--timeout',
                        type=int,
                        default=3,
                        help='socket timeout. default 3s')
    parser.add_argument('-f', '--file', help='hosts file, 1 per line')
    parser.add_argument('hosts', nargs='*', help='hostnames or addresses')
    args = parser.parse_args()

    if args.file:
        for addr in open(args.file):
            args.hosts.append(addr.strip())

    if args.prompt:
        args.password = getpass.getpass()

    socket.setdefaulttimeout(args.timeout)
    pool = ThreadPool(args.threads)
    pool.map(auth_thread, [(h, args) for h in set(args.hosts)])
Example #5
 def _parallel_augments(cls, action_fx, data):
     pool = ThreadPool(len(data))
     results = pool.map(action_fx, data)
     pool.close()
     pool.join()
     return results
Example #6
def multiprocessing():
    if max_threads < 1:
        pool = ThreadPool()
    else:
        pool = ThreadPool(max_threads)
    return pool
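When ThreadPool() is called with no argument it sizes itself to os.cpu_count() worker threads, so the max_threads < 1 branch above effectively means "one thread per CPU". A quick sketch:

from multiprocessing.dummy import Pool as ThreadPool

pool = ThreadPool()                  # defaults to os.cpu_count() threads
print(pool.map(abs, [-1, -2, -3]))   # [1, 2, 3]
pool.close()
pool.join()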
Example #7
    return Request

def writeRespone(request, fileposition):
    Response = urllib2.urlopen(request, timeout=10)
    fd2 = os.dup(fd)
    os.lseek(fd2, fileposition, os.SEEK_SET)
    print os.write(fd2, Response.read())
    os.close(fd2)

def openURL(start):
    Request = returnRequest(start)
    try:
        writeRespone(Request, start)
    except socket.error, e:
        print e,
        print("%d have error, download later " % start)
        LaterDownList.append(start)
    except urllib2.HTTPError, e:
        print e,
        print("resource unavailable")
        os.remove(File)
        sys.exit(127)

pool = ThreadPool(processes=1)
fd = os.open(File, os.O_WRONLY|os.O_CREAT)
print "go"
pool.map(openURL, LenList)
pool.close()
pool.join()
os.close(fd)
Example #8
 def parse(self, pages=None):
     self.bar = ProgressBar(total=len(pages))
     pool = ThreadPool(64)
     pool.map(self.parse_page, pages)
     pool.close()
     pool.join()
Example #9
def simple_bridge_loops(graph, start_overlap_reads, end_overlap_reads, minimap_alignments,
                        read_dict, scoring_scheme, threads, segments_to_bridge):
    bridges = []
    ra = get_right_arrow()
    zero_loops = 'A' + ra + 'C' + ra + 'B'
    one_loop = 'A' + ra + 'C' + ra + 'D' + ra + 'C' + ra + 'B'
    two_loops = 'A' + ra + 'C' + ra + 'D' + ra + 'C' + ra + 'D' + ra + 'C' + ra + 'B'
    log.log_explanation('Simple loops are parts of the graph where two contigs (A and B) '
                        'are connected via a repeat (C) which loops back to itself (via D). It '
                        'is possible to traverse the loop zero times (' + zero_loops + '), one '
                        'time (' + one_loop + '), two times (' + two_loops + '), etc. '
                        'Long reads which span the loop inform which is the correct number of '
                        'times through. In this step, such reads are found and each is aligned '
                        'against alternative loop counts. A read casts its "vote" for the loop '
                        'count it agrees best with, and Unicycler creates a bridge using the '
                        'most voted for count.')

    loops = sorted(graph.find_all_simple_loops())
    seg_nums_to_bridge = set(x.number for x in segments_to_bridge)
    loops = [x for x in loops
             if abs(x[0]) in seg_nums_to_bridge and abs(x[1]) in seg_nums_to_bridge and
                abs(x[3]) not in seg_nums_to_bridge]
    loops = [x for x in loops if abs(x[0]) != abs(x[1])]
    if not loops:
        log.log('No suitable simple loops present')
        return []

    col_widths = [5, 6, 6, 5, 5, 18, 5, 7]
    loop_table_header = ['Start', 'Repeat', 'Middle', 'End', 'Read count', 'Read votes',
                         'Loop count', 'Bridge quality']
    print_table([loop_table_header], fixed_col_widths=col_widths, left_align_header=False,
                alignments='RRRRRLRR', indent=0)

    for start, end, middle, repeat in loops:
        if middle is None:
            loop_table_row = [start, repeat, '', end]
        else:
            loop_table_row = [start, repeat, middle, end]

        forward_strand_reads = end_overlap_reads[start] & start_overlap_reads[end]
        reverse_strand_reads = end_overlap_reads[-end] & start_overlap_reads[-start]

        all_reads = list(forward_strand_reads) + list(reverse_strand_reads)
        strands = ['F'] * len(forward_strand_reads) + ['R'] * len(reverse_strand_reads)
        loop_table_row.append(len(all_reads))

        # This dictionary will collect the votes. The key is the number of times through the loop
        # and the value is the vote count. Votes for -1 times through the loop occur for reads
        # which don't conform to the loop assumption.
        votes = defaultdict(int)

        # We'll try a range of repeat counts. The segment depth gives us a first guess as to the
        # repeat count, which guides how high we should test.
        mean_start_end_depth = (graph.segments[abs(start)].depth +
                                graph.segments[abs(end)].depth) / 2
        if middle is None:
            repeat_depth = graph.segments[abs(repeat)].depth
            best_repeat_guess = int(round(repeat_depth / mean_start_end_depth)) - 1
        else:
            middle_depth = graph.segments[abs(middle)].depth
            best_repeat_guess = int(round(middle_depth / mean_start_end_depth))
        best_repeat_guess = max(1, best_repeat_guess)
        max_tested_loop_count = (best_repeat_guess + 1) * 2

        # Use a simple loop if we only have one thread.
        if threads == 1:
            for read, strand in zip(all_reads, strands):
                vote = get_read_loop_vote(start, end, middle, repeat, strand, minimap_alignments,
                                          read, read_dict, graph, max_tested_loop_count,
                                          scoring_scheme)
                votes[vote] += 1

        # Use a thread pool if we have more than one thread.
        else:
            pool = ThreadPool(threads)
            arg_list = []
            for read, strand in zip(all_reads, strands):
                arg_list.append((start, end, middle, repeat, strand, minimap_alignments,
                                 read, read_dict, graph, max_tested_loop_count, scoring_scheme))
            for vote in pool.imap_unordered(get_read_loop_vote_one_arg, arg_list):
                votes[vote] += 1

        # Format the vote totals nicely for the table.
        vote_str = ''
        for loop_count in sorted(votes.keys()):
            if loop_count == -1:
                vote_str += 'bad: '
            elif loop_count == 1:
                vote_str += '1 loop: '
            else:
                vote_str += str(loop_count) + ' loops: '
            vote_count = votes[loop_count]
            vote_str += str(vote_count) + ' vote' + ('s' if vote_count != 1 else '') + '    '
        loop_table_row.append(vote_str.strip())

        # Determine the repeat count which wins!
        results = sorted(list(votes.items()), key=lambda x: x[1], reverse=True)
        if not results:
            loop_table_row += ['no reads', '']
        else:
            winning_loop_count = results[0][0]
            winning_votes = results[0][1]
            if len(results) == 1:
                second_best_votes = 0
                votes_against = 0
            else:
                second_best_votes = results[1][1]
                votes_against = sum(r[1] for r in results) - winning_votes
            if winning_loop_count == -1:
                loop_table_row += ['bad reads', '']
            elif winning_votes == second_best_votes:
                loop_table_row += ['tie vote', '']
            else:
                # If we got here, then we're good to bridge!
                loop_table_row.append(str(winning_loop_count))

                bridge_path = [repeat]
                for _ in range(winning_loop_count):
                    if middle is not None:
                        bridge_path.append(middle)
                    bridge_path.append(repeat)

                bridges.append(SimpleLongReadBridge(graph, start, end, bridge_path, winning_votes,
                                                    votes_against))
                loop_table_row.append(float_to_str(bridges[-1].quality, 1))

        print_table([loop_table_row], fixed_col_widths=col_widths, header_format='normal',
                    alignments='RRRRRLRR', left_align_header=False, bottom_align_header=False,
                    sub_colour={'bad reads': 'red', 'no reads': 'red', 'tie vote': 'red'}, indent=0)
    return bridges
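The vote tallying above uses imap_unordered, which yields results as workers finish rather than in submission order; since only the counts matter, completion order is irrelevant, and the main thread is the sole writer to the defaultdict. A minimal sketch of the same tally pattern, with a toy voting function in place of get_read_loop_vote_one_arg:

from collections import defaultdict
from multiprocessing.dummy import Pool as ThreadPool

def toy_vote(arg):        # stands in for get_read_loop_vote_one_arg
    return arg % 3        # pretend each read votes for loop count 0, 1 or 2

votes = defaultdict(int)
pool = ThreadPool(4)
for vote in pool.imap_unordered(toy_vote, range(10)):
    votes[vote] += 1      # tallied in completion order; the counts are order-independent
pool.close()
pool.join()
print(dict(votes))        # {0: 4, 1: 3, 2: 3} (key order may vary)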
Example #10
def _send_bulk(smss, uses_multiprocessing=True, log_level=None, threads=4):
    # Multiprocessing does not play well with database connection
    # Fix: Close connections on forking process
    # https://groups.google.com/forum/#!topic/django-users/eCAIY9DAfG0
    if uses_multiprocessing:
        db_connection.close()

    if log_level is None:
        log_level = get_log_level()

    sent_smses = []
    failed_smses = []
    sms_count = len(smss)

    logger.info('Process started, sending %s sms' % sms_count)

    def send(sms):
        try:
            sms.dispatch(log_level=log_level, commit=False)
            sent_smses.append(sms)
            logger.debug('Successfully sent sms #%d' % sms.id)
        except Exception as e:
            logger.debug('Failed to send sms #%d' % sms.id)
            failed_smses.append((sms, e))

    number_of_threads = min(threads, sms_count)
    pool = ThreadPool(number_of_threads)

    pool.map(send, smss)
    pool.close()
    pool.join()
    pool.terminate()  # redundant after close()/join(), but harmless

    # update statuses of sent and failed_smses
    for sms in sent_smses:
        sms.save()

    for (sms, _) in failed_smses:
        sms.save()

    if log_level >= 1:
        logs = []
        for (sms, exception) in failed_smses:
            logs.append(
                Log(sms=sms,
                    status=STATUS.failed,
                    message=str(exception),
                    exception_type=type(exception).__name__))

        if logs:
            Log.objects.bulk_create(logs)

    if log_level == 2:
        logs = []
        for sms in sent_smses:
            logs.append(Log(sms=sms, status=STATUS.sent))

        if logs:
            Log.objects.bulk_create(logs)

    sent_count = len(sent_smses)
    failed_count = len(failed_smses)
    logger.info('Process finished, %s attempted, %s sent, %s failed' %
                (sms_count, sent_count, failed_count))

    return (sent_count, failed_count)
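send collects its results by appending to lists captured from the enclosing scope; under CPython's GIL, list.append is atomic, so this is safe across worker threads. A stripped-down sketch of the closure pattern (the items and the failure condition here are hypothetical):

from multiprocessing.dummy import Pool as ThreadPool

def send_bulk(items, threads=4):
    sent, failed = [], []                     # shared via the closure; list.append is GIL-atomic

    def send(item):
        try:
            if item < 0:                      # hypothetical failure condition
                raise ValueError("negative value")
            sent.append(item)
        except Exception as e:
            failed.append((item, e))

    pool = ThreadPool(max(1, min(threads, len(items))))
    pool.map(send, items)
    pool.close()
    pool.join()
    return len(sent), len(failed)

print(send_bulk([1, -2, 3]))                  # (2, 1)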
Example #11
    pre_embed_dim = data.type_num * info_section
    output_dim = max(data.train_list[:, 1].tolist()) + 1  # number of hidden units (two layers)

    DEVICE = "cuda" if torch.cuda.is_available() else "cpu"  # 获取预处理数据

    assert select_method in ["end_node", "all_node"]

    x = data.x
    train_list = data.train_list  # labels for the training nodes/data
    test_list = data.test_list  # indices of the test nodes/data
    val_list = data.val_list  # indices of the validation nodes/data
    criterion = nn.CrossEntropyLoss().to(DEVICE)

    num_thread = 12
    pool = ThreadPool(num_thread)

    # enumerate the metapaths here and initialize the selection model
    metapath_name = mp.enum_metapath_name(data.get_metapath_name(),
                                          data.get_metapath_dict(),
                                          metapath_length)
    metapath_list = mp.enum_longest_metapath_index(data.get_metapath_name(),
                                                   data.get_metapath_dict(),
                                                   metapath_length)

    select_model = GraphMSE(metapath_list=metapath_name,
                            input_dim=input_dim,
                            pre_embed_dim=pre_embed_dim,
                            select_dim=output_dim,
                            mlp_settings=mlp_settings).to(DEVICE)
    train(model=select_model, epochs=100, method="all_node", ablation=ablation)
Example #12
def main():
    p = ThreadPool()
    p.map_async(goto_wc, range(50))
    p.close()
    p.join()
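Unlike map, map_async returns immediately with an AsyncResult; it is the close()/join() pair above that actually waits for all 50 tasks, and the return value is discarded. A sketch showing both ways to wait, with a stand-in for goto_wc (which is not shown in the original):

from multiprocessing.dummy import Pool as ThreadPool

def goto_wc(i):                    # stand-in for the original goto_wc
    return i * 2

pool = ThreadPool()
async_result = pool.map_async(goto_wc, range(50))
pool.close()
pool.join()                        # one way to wait: join the closed pool
print(async_result.get()[:3])      # the other way: AsyncResult.get() blocks; prints [0, 2, 4]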
Example #13
                    removal_list=Removal_list,
                    isotypes=Isotypes)

    B = SpadeGating(filename=B_Filename,
                    gates=Gates,
                    parameters=Parameters,
                    source=Source,
                    target=Target,
                    removal_list=Removal_list,
                    isotypes=Isotypes)

    plate_plate = pd.Series()
    antigens_to_test = [
        i for i in A.data.columns if i not in backbone_antigens
    ]

    pool = ThreadPool(num_workers)
    costs = pool.map(worker, antigens_to_test)
    pool.close()
    pool.join()
    output["{}vs{}".format(Normal, Base)] = dict(zip(antigens_to_test, costs))

    writing_out = pd.DataFrame(output)
    writing_out.to_csv(output_file)

c = [i for i in writing_out.columns if i not in ['Untitled: 0']]
#writing_out['mean'] = writing_out.mean(axis=1)
#writing_out['rms'] = np.sqrt(sum(output[c]*output[c],axis=1))

#writing_out.to_csv(output_file)
Example #14
def calculateParallel(numbers, threads=10):
    pool = ThreadPool(threads)
    results = pool.map(squareNumber, numbers)
    pool.close()
    #pool.join()  # not needed for correctness: map() already blocked until all results were ready
    return results
Example #15
# For trajectory storage
import h5py
import uuid

# OpenBLAS(used by OpenCV) changes CPU affinity
os.sched_setaffinity(0, range(os.cpu_count()))


def setaff():
    os.sched_setaffinity(0, range(os.cpu_count()))


# for Multi-threading
from multiprocessing.dummy import Pool as ThreadPool

pool = ThreadPool(5, setaff)


# =======================================================================
def filter_trajs_displacement(trajs):
    num_trajs = len(trajs)
    disp_stor = np.empty((num_trajs, ), np.float32)
    for ii in range(num_trajs):
        traj = trajs[ii]  # fixed: index the ii-th trajectory instead of the whole list
        disp_stor[ii] = np.sum(
            np.sqrt(np.sum((traj[1:, :] - traj[0:-1, :])**2, 1)))
    # Remove trajectories that have very low displacement
    good_trajs = np.flatnonzero(disp_stor > -1)  # a threshold of -1 keeps everything; raise it to actually filter

    return good_trajs
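The second argument in ThreadPool(5, setaff) is an initializer: each of the five worker threads runs it once on startup, re-widening the CPU affinity that OpenBLAS may have narrowed. A minimal sketch of the initializer mechanism (note os.sched_setaffinity is Linux-only):

import os
from multiprocessing.dummy import Pool as ThreadPool

def init_worker():
    # runs once in each worker thread before any task is executed
    os.sched_setaffinity(0, range(os.cpu_count()))   # Linux-only call

pool = ThreadPool(5, init_worker)
print(pool.map(lambda x: x + 1, [1, 2, 3]))          # [2, 3, 4]
pool.close()
pool.join()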

Example #16
from PIL import Image, ImageFilter
from multiprocessing.dummy import Pool as ThreadPool 
from skimage.filters import gaussian
import numpy as np
import scipy

NUM_OF_THREADS = 1
SIGMA = 12
FINAL_SLICE_SIZE = 512

filename = 'k'
dirname = './'+filename
BLUR_DIRNAME = dirname+'_blurred'
CROP_DIRNAME = dirname+'_cropped'

pool = ThreadPool(NUM_OF_THREADS) 

# Load every image file in the provided directory
filenames = [os.path.join(dirname, fname)
             for fname in os.listdir(dirname) if fname.endswith('.jpg')]

filenames = np.asarray(filenames)
filenames_split = np.array_split(filenames, NUM_OF_THREADS)

print(filenames)

if not os.path.exists(BLUR_DIRNAME):
    os.mkdir(BLUR_DIRNAME)

if not os.path.exists(CROP_DIRNAME):
    os.mkdir(CROP_DIRNAME)
Example #17
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--api',
                        help='Which API to use. Choose from (gc/azure/aws/gtts)',
                        type=str)
    parser.add_argument('--api-key',
                        help='API_KEY or the path to the Key file',
                        type=str)
    parser.add_argument('--output-dir',
                        help='Where to store the generated audio samples',
                        type=str)
    parser.add_argument('--length',
                        help='Length of captchas in characters',
                        type=int)
    parser.add_argument('--count',
                        help='How many captchas to generate',
                        type=int)
    parser.add_argument('--scramble',
                        help='Whether to scramble image names',
                        default=False,
                        action='store_true')
    parser.add_argument('--symbols',
                        help='File with the symbols to use in captchas',
                        type=str)
    args = parser.parse_args()

    if args.api is None:
        print("Please specify the cloud platform to use")
        exit(1)

    if args.api_key is None:
        print("Please specify the API_KEY or the path to the Key fiile")
        exit(1)

    if args.output_dir is None:
        print("Please specify the samples output directory")
        exit(1)

    if args.length is None:
        print("Please specify the captcha length")
        exit(1)

    if args.count is None:
        print("Please specify the captcha count to generate")
        exit(1)

    if args.symbols is None:
        print("Please specify the captcha symbols file")
        exit(1)

    symbols_file = open(args.symbols, 'r')
    symbols = symbols_file.readline().strip()

    if not os.path.exists(args.output_dir):
        print("Creating output directory " + args.output_dir)
        os.makedirs(args.output_dir)

    for i in range(args.count):
        captcha_text = ''.join(
            [random.choice(symbols) for j in range(args.length)])

        # GOOGLE Cloud TTS API
        if args.api.lower() == "gc":
            os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = args.api_key
            client = texttospeech.TextToSpeechClient()

            voice_list = []
            voices = client.list_voices()

            for voice in voices.voices:
                gender = texttospeech.enums.SsmlVoiceGender(voice.ssml_gender)
                if format(gender.name) == 'FEMALE':
                    for language_code in voice.language_codes:
                        if re.match('^en-.*', language_code) is not None:
                            voice_list.append(format(voice.name))

            for voice_name in voice_list:
                print('Google TextToSpeech API voice ', voice_name)
                response = gc_synthesize_text(client, captcha_text,
                                              voice_name).audio_content
                file_name = create_filename(args.scramble, args.output_dir,
                                            captcha_text)
                write_audio(response, file_name)
            sleep(0.2)

# MICROSOFT Azure Cloud Cognitive Speech API
        elif args.api.lower() == "azure":
            voice_list = [
                "Microsoft Server Speech Text to Speech Voice (en-AU, Catherine)",
                "Microsoft Server Speech Text to Speech Voice (en-AU, HayleyRUS)",
                "Microsoft Server Speech Text to Speech Voice (en-CA, Linda)",
                "Microsoft Server Speech Text to Speech Voice (en-CA, HeatherRUS)",
                "Microsoft Server Speech Text to Speech Voice (en-GB, Susan, Apollo)",
                "Microsoft Server Speech Text to Speech Voice (en-GB, HazelRUS)",
                "Microsoft Server Speech Text to Speech Voice (en-US, ZiraRUS)",
                "Microsoft Server Speech Text to Speech Voice (en-US, JessaRUS)",
                "Microsoft Server Speech Text to Speech Voice (en-US, Jessa24kRUS)"
            ]
            speech_key, service_region = args.api_key, "westeurope"
            speech_config = speechsdk.SpeechConfig(subscription=speech_key,
                                                   region=service_region)
            for voice in voice_list:
                file_name = create_filename(args.scramble, args.output_dir,
                                            captcha_text)
                azure_speech_synthesis_to_mp3_file(speech_config,
                                                   " ".join(captcha_text),
                                                   voice, file_name)
            sleep(0.5)

# AWS Polly Speech API
        elif args.api.lower() == 'aws':
            voice_list = [
                'Salli', 'Joanna', 'Kendra', 'Kimberly', 'Amy', 'Emma',
                'Nicole', 'Raveena', 'Aditi'
            ]
            polly_client = boto3.Session(
                aws_access_key_id="AKIA3TQLBVKRT5JS7Y4O",
                aws_secret_access_key=args.api_key,
                region_name='eu-west-1').client('polly')
            for voice in voice_list:
                response = polly_client.synthesize_speech(
                    VoiceId=voice,
                    Engine="standard",
                    OutputFormat='mp3',
                    Text=" ".join(captcha_text))
                response = response['AudioStream'].read()
                file_name = create_filename(args.scramble, args.output_dir,
                                            captcha_text)
                write_audio(response, file_name)


# GTTS Library
        elif args.api.lower() == 'gtts':
            pool = ThreadPool(4)
            pool.map(my_function, my_array)  # leftover placeholder; my_function and my_array are not defined here
            gtts_instance = gTTS(text=captcha_text, lang='en', slow=False)
            file_name = create_filename(args.scramble, args.output_dir,
                                        captcha_text)
            gtts_instance.save(file_name)
Example #18
def calculateParallel(flist, threads=2):
    pool = ThreadPool(threads)
    results = pool.map(do_prune, flist)
    pool.close()
    pool.join()
    return results
Example #19
nlp = spacy.load('de')

from lib import AgdistisEntityLinker

linker = AgdistisEntityLinker()

nlp.add_pipe(linker)

import time
start_time = time.time()

from multiprocessing.dummy import Pool as ThreadPool 

#print(nlp.pipe_names) # Default processing components for en model

pool = ThreadPool(8) 

file = sys.argv[1]
file2 = sys.argv[2]

line2 = open(file2,"w");
def annotate(text):
 #for text in open("testfile.txt", 'r+'):
     
        try:

            newtext = text;    
            doc = nlp(newtext)

            position = 1;
            for ent in doc.ents:
Example #20
def forward_request(input_json, master_name, pretty, debug):
    """
    Forwards a request to the node that has all the available information to answer it. This function is called when a
    distributed_master function is used. Only the master node calls this function. An API request will only be forwarded
    to worker nodes.

    :param input_json: API request: Example: {"function": "/agents", "arguments":{"limit":5}, "ossec_path": "/var/ossec", "from_cluster":false}
    :param master_name: Name of the master node. Necessary to check whether to forward it to a worker node or not.
    :param pretty: JSON pretty print
    :param debug: Debug
    :return: a JSON response.
    """
    def forward(node_name, return_none=False):
        """
        Forwards a request to a node.
        :param node_name: Node to forward a request to.
        :param return_none: Whether to return an error message or nothing (if there's an error forwarding the request).
        :return: a JSON response
        """
        if node_name == 'unknown' or node_name == '':
            # if the agent is never connected or pending (i.e. its node name is unknown or empty), do the request locally
            response = json.loads(
                distribute_function(copy.deepcopy(input_json)))
        else:
            # if not, check if the node the request is being forwarded to is the master or a worker.
            command = 'dapi_forward {}'.format(
                node_name) if node_name != master_name else 'dapi'
            if command == 'dapi':
                # if it's the master, execute the request directly
                response = json.loads(
                    distribute_function(copy.deepcopy(input_json),
                                        debug=debug))
            else:
                # if it's a worker, forward it
                response = i_s.execute(
                    '{} {}'.format(command, json.dumps(input_json)),
                    input_json['arguments']['wait_for_complete'])
                if not isinstance(response, dict):
                    # If there's an error and the flag return_none is not set, return a dictionary with the response.
                    response = {
                        'error': 3016,
                        'message': str(WazuhException(3016, response))
                    } if not return_none else None
        return response

    def forward_list(item):
        """
        Function called when there are multiple nodes to forward a request to.
        :param item: A dictionary with {node_name: [list of agents ids]}
        :return: JSON response of a single node
        """
        name, agent_ids = item
        if agent_ids:
            input_json['arguments']['agent_id'] = agent_ids
        return forward(name, agent_ids == [])

    # get the node(s) that have all the available information to answer the request.
    node_name, is_list = get_solver_node(input_json, master_name)
    input_json['from_cluster'] = True

    if is_list:
        # if there are multiple nodes to forward the request, create a ThreadPool and forward it in parallel.
        pool = ThreadPool(len(node_name))
        responses = list(
            filter(lambda x: x is not None,
                   pool.map(forward_list, node_name.items())))
        pool.close()
        pool.join()
        final_json = {}
        response = merge_results(responses, final_json, input_json)
    else:
        response = forward(node_name)

    data, error = __split_response_data(response)
    return print_json(data=data, pretty=pretty, error=error)
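Sizing the pool to len(node_name) gives every node its own thread, and the filter drops the None responses produced when return_none is set. A reduced sketch of this fan-out-and-filter shape, with stand-in node names and a toy forwarder:

from multiprocessing.dummy import Pool as ThreadPool

def forward_list(item):                       # stand-in for the real forwarder above
    name, agent_ids = item
    return {name: len(agent_ids)} if agent_ids else None

node_name = {"worker-1": ["001", "002"], "worker-2": []}
pool = ThreadPool(len(node_name))
responses = list(filter(lambda x: x is not None,
                        pool.map(forward_list, node_name.items())))
pool.close()
pool.join()
print(responses)                              # [{'worker-1': 2}]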
Example #21
incremented = 0
parameter = 0

q = Queue.Queue()
threadQ = Queue.Queue()


def putToQ(tupleValQ):
    time.sleep(0.1)
    global threadQ
    tupleValQ[1].put(tupleValQ[0])
    threadQ.put(threading.current_thread())
    # i.e. queue.put(val) for each (val, queue) tuple
    return tupleValQ[0]


pool = ThreadPool(7)
results = pool.map(putToQ, zip([i for i in range(20)],
                               [q] * 20))  #threads start not in array order

print "queue: ",
while not q.empty():
    print q.get(),
print " "

print "threadQueue: ",
while not threadQ.empty():
    print threadQ.get()

print "results: ", results
Example #22
import socket
from multiprocessing.dummy import Pool as ThreadPool
import time

#server_IP = '127.0.0.1'
server_IP = '106.14.161.204'
server_Port = 3389

def tryOnceConnect(args):
    print("thread args = %s"%args)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((server_IP, server_Port))
    print(s.recv(1024).decode('utf-8'))
    for data in [b'Michael', b'Tracy', b'Sarah']:
        s.send(data)
        print(s.recv(1024).decode('utf-8'))
    s.send((b'exit'))
    s.close()

#main
args = []
for i in range(0,1024):
    args.append(i)

pool = ThreadPool(1024)
time_start = time.time()
result = pool.map(tryOnceConnect, args)
pool.close()
pool.join()
time_end = time.time()
print "%s 100线程 共消耗:"%str(time_end-time_start)
Example #23
 def _parallel_augment(cls, action_fx, data, n, num_thread=2):
     pool = ThreadPool(num_thread)
     results = pool.map(action_fx, [data] * n)
     pool.close()
     pool.join()
     return results
Example #24
    with open('summary.txt', "w+") as f:
        pass

    ds = [a for a in AnmalZoo]
    ds.remove(AnmalZoo.Hamming)
    ds.remove(AnmalZoo.Levenshtein)
    ds.remove(AnmalZoo.EntityResolution)
    ds.remove(AnmalZoo.Dotstar)
    ds.remove(AnmalZoo.PowerEN)
    ds.remove(AnmalZoo.Brill)
    ds.remove(AnmalZoo.RandomForest)
    ds.remove(AnmalZoo.Dotstar03)
    ds.remove(AnmalZoo.Dotstar06)
    ds.remove(AnmalZoo.Dotstar09)
    ds.remove(AnmalZoo.Protomata)
    ds.remove(AnmalZoo.Ranges05)
    ds.remove(AnmalZoo.Bro217)
    ds.remove(AnmalZoo.Ranges1)
    ds.remove(AnmalZoo.ExactMath)
    ds.remove(AnmalZoo.Custom)
    ds.remove(AnmalZoo.TCP)
    ds.remove(AnmalZoo.Synthetic_BlockRings)

    thread_count = 8

    t_pool = ThreadPool(thread_count)
    results = t_pool.map(process_single_ds, ds)
    t_pool.close()
    t_pool.join()
Example #25
def main():
    import sys
    import getopt
    from glob import glob

    args, img_mask = getopt.getopt(sys.argv[1:], '',
                                   ['debug=', 'square_size=', 'threads='])
    args = dict(args)
    args.setdefault('--debug', './output/')
    args.setdefault('--square_size', 1.0)
    args.setdefault('--threads', 4)
    if not img_mask:
        img_mask = 'C:/Users/wangl/GRIP/images/*.jpeg'  # default
        # img_mask = '../data/left??.jpg'  # default
    else:
        img_mask = img_mask[0]

    img_names = glob(img_mask)
    debug_dir = args.get('--debug')
    if debug_dir and not os.path.isdir(debug_dir):
        os.mkdir(debug_dir)
    square_size = float(args.get('--square_size'))

    pattern_size = (9, 6)
    pattern_points = np.zeros((np.prod(pattern_size), 3), np.float32)
    pattern_points[:, :2] = np.indices(pattern_size).T.reshape(-1, 2)
    pattern_points *= square_size

    obj_points = []
    img_points = []
    h, w = cv.imread(img_names[0], cv.IMREAD_GRAYSCALE
                     ).shape[:2]  # TODO: use imquery call to retrieve results

    def processImage(fn):
        print('processing %s... ' % fn)
        img = cv.imread(fn, 0)
        if img is None:
            print("Failed to load", fn)
            return None

        assert w == img.shape[1] and h == img.shape[0], (
            "size: %d x %d ... " % (img.shape[1], img.shape[0]))
        found, corners = cv.findChessboardCorners(img, pattern_size)
        if found:
            term = (cv.TERM_CRITERIA_EPS + cv.TERM_CRITERIA_COUNT, 30, 0.1)
            cv.cornerSubPix(img, corners, (5, 5), (-1, -1), term)

        if debug_dir:
            vis = cv.cvtColor(img, cv.COLOR_GRAY2BGR)
            cv.drawChessboardCorners(vis, pattern_size, corners, found)
            _path, name, _ext = splitfn(fn)
            outfile = os.path.join(debug_dir, name + '_chess.png')
            cv.imwrite(outfile, vis)

        if not found:
            print('chessboard not found')
            return None

        print('           %s... OK' % fn)
        return (corners.reshape(-1, 2), pattern_points)

    threads_num = int(args.get('--threads'))
    if threads_num <= 1:
        chessboards = [processImage(fn) for fn in img_names]
    else:
        print("Run with %d threads..." % threads_num)
        from multiprocessing.dummy import Pool as ThreadPool
        pool = ThreadPool(threads_num)
        chessboards = pool.map(processImage, img_names)

    chessboards = [x for x in chessboards if x is not None]
    for (corners, pattern_points) in chessboards:
        img_points.append(corners)
        obj_points.append(pattern_points)

    # calculate camera distortion
    rms, camera_matrix, dist_coefs, _rvecs, _tvecs = cv.calibrateCamera(
        obj_points, img_points, (w, h), None, None)

    print("\nRMS:", rms)
    print("camera matrix:\n", camera_matrix)
    print("distortion coefficients: ", dist_coefs.ravel())

    # undistort the image with the calibration
    print('')
    for fn in img_names if debug_dir else []:
        _path, name, _ext = splitfn(fn)
        img_found = os.path.join(debug_dir, name + '_chess.png')
        outfile = os.path.join(debug_dir, name + '_undistorted.png')

        img = cv.imread(img_found)
        if img is None:
            continue

        h, w = img.shape[:2]
        newcameramtx, roi = cv.getOptimalNewCameraMatrix(
            camera_matrix, dist_coefs, (w, h), 1, (w, h))

        dst = cv.undistort(img, camera_matrix, dist_coefs, None, newcameramtx)

        # crop and save the image
        x, y, w, h = roi
        dst = dst[y:y + h, x:x + w]

        print('Undistorted image written to: %s' % outfile)
        cv.imwrite(outfile, dst)

    print('Done')
Example #26
import logging
import itertools

SIO_configuration = configuration.SIOconfiguration()
FIO_instance = FIO('10.139.218.26')
SCLI = scli.SCLI(sio_config=SIO_configuration)
SIOInfraHandler = scli.SIOInfraHandler()
SIOInfraGather = scli.SIOInfraGather(SCLI, SIOInfraHandler)
IntegrationConfigInstance = configuration.Integration()

MainLogger = logger_init.logging_config(
    integration_config=IntegrationConfigInstance,
    logging_mode='DEBUG',
    log_to_file=False,
    executable_path=__file__)
pool = ThreadPool(5)

base_volume = 'vol1'
vol_size_in_tb = 32
sdc_ip_A = '192.168.247.16'
sdc_ip_M = '10.139.218.26'

SCLI.login()


def make_mh_full_snap(base_volume_func: str, snapshot_name_func: str,
                      sdc_ip_a_func: str, sdc_ip_m_func: str,
                      vol_size_in_tb_func: int):
    scini_guid = False
    write_offset = 8796093022208
    logger = logging.getLogger()
Example #27
    batch_start = i
    batch_end = i + seg * batch_size

    with open(vid_data_file, data_mode) as f:  #Open data file

        # for vids_in_run in VidBatcherSingle( batch_start, batch_end, seg ):#loop through this segment of ids
        #     vids_in_run = [ str(i) for i in vids_in_run ]
        #     records = crawler(vids_in_run)
        #
        #     with ThreadPool(2) as p:
        #        records = p.map(crawler, [vids_in_run])
        #
        #     file_cache = file_cache +  records[0]

        #         print(file_cache)

        with ThreadPool(prs) as p:
            records = p.map(crawler, [[
                str(i) for i in vids_in_run
            ] for vids_in_run in VidBatcherSingle(batch_start, batch_end, seg)
                                      ])

        # print('records length: ' + str( len(records) ) + '\n' )
        # print('each record length: ' + str( len(records[0]) ) + '\n' )
        # print('0 the element of the first record length: ' + str( len(records[0][0]) ) + '\n' )
        # print(records)
        records_flatten = [item for sublist in records for item in sublist]
        VidDumpBatch(records_flatten, f)

print('job finished at: ' + str(datetime.datetime.now()) + '\n')
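Here the pool is used as a context manager: on leaving the with block, __exit__ calls terminate(), so no explicit close()/join() is needed once map has returned. A minimal sketch of the same structure, with stand-ins for crawler and the VidBatcherSingle batches:

from multiprocessing.dummy import Pool as ThreadPool

def crawler(id_batch):                        # stand-in for the original crawler
    return ["vid-" + vid for vid in id_batch]

batches = [["1", "2"], ["3", "4"]]            # stand-in for VidBatcherSingle output
with ThreadPool(2) as p:                      # __exit__ calls terminate() automatically
    records = p.map(crawler, batches)

flat = [item for sublist in records for item in sublist]
print(flat)                                   # ['vid-1', 'vid-2', 'vid-3', 'vid-4']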
Example #28
def probMask(A, P, X, ii, jj):
    def cross_product(a, b):
        return a[0] * b[1] - a[1] * b[0]

    def iscross(pairData):
        A, B = pairData[0]
        C, D = pairData[1]
        AC = C - A
        AD = D - A
        BC = C - B
        BD = D - B
        CA = -AC
        CB = -BC
        DA = -AD
        DB = -BD

        return 1 if cross_product(AC, AD) * cross_product(
            BC, BD) < 0 and cross_product(CA, CB) * cross_product(
                DA, DB) < 0 else 0

    def calprobMask(pairData):
        Adj, Pro, Xfe, ii, jj = pairData
        Adj = Adj[:ii, :ii]
        Pro = Pro[:jj - ii, :]
        Xfe = Xfe[:jj]

        # existed ridges
        edgesList = []
        edgeSet = set()
        edgesIdxList = []
        # G = nx.Graph()
        for i in range(ii):
            for j in range(i):
                if Adj[i][j] == 1 and (i, j) not in edgeSet:
                    edgesList.append([Xfe[i][:2], Xfe[j][:2]])
                    edgesIdxList.append((i, j))
                    # G.add_edge(i, j)
                    edgeSet.add((i, j))

        newNodes = Xfe[ii:jj]
        newp = Pro
        for idx in range(len(newNodes)):
            for pidx in range(len(Pro[idx])):
                newedge = [Xfe[pidx][:2], Xfe[ii + idx][:2]]
                # edgesIdxList.append((pidx,ii+idx))
                for i, edge in enumerate(edgesList):
                    if pidx in edgesIdxList[i]:
                        continue
                    if iscross([edge, newedge]):
                        newp[idx, pidx] = 0
                        break

        return newp

    newP = []
    pool = ThreadPool(4)
    tasks = [[A[i], P[i], X[i], ii, jj] for i in range(len(A))]
    newP = pool.map(calprobMask, tasks)
    pool.close()
    pool.join()
    return newP
Example #29
    print(driver.title)
    driver.quit()


def gethtml1(url):
    prefs = {"profile.managed_default_content_settings.images": 2}
    chrome_options = Options()
    chrome_options.add_argument('--headless')
    chrome_options.add_experimental_option("prefs", prefs)
    driver = webdriver.Chrome(chrome_options=chrome_options)
    driver.get(url)
    print(driver.title)
    driver.quit()
    #driver.close()


myurls = [
    'http://www.gzzbw.cn/trade/?category=affiche',
]

#gethtml("http://www.gzzbw.cn/trade/?category=affiche")

pool = ThreadPool(4)
#results = pool.map(urllib.request.urlopen, urls)
#results = pool.map(gethtml, myurls)
#gethtml1('https://www.python.org/community/awards/')
results = pool.map(gethtml1, urls)   # 'urls' is presumably defined above this excerpt
results = pool.map(gethtml, myurls)  # note: this overwrites the previous results
pool.close()
pool.join()
Example #30
		line = fp.readline()
		if line:
			libs.append(line.strip())
		else:
			break
	fp.close()
	return libs

def redisCheck(ip):
	try:
		r = redis.Redis(host=ip,port=6379,db=0,socket_timeout=120)
		return {ip:r.ping()}
	except:
		return {ip:False}

if __name__ == '__main__':
	socket.setdefaulttimeout(10)
	filepath,threed_num = argv[1:]
	ips = readText(filepath)
	pool = ThreadPool(int(threed_num))
	results = pool.map(redisCheck,ips)
	pool.close()
	pool.join()
	result = {}
	for x in results:
		result = dict(result,**x)
	fp = open('redisOut.txt','w')
	for i in result:
		fp.write(i + '	' + str(result[i]) + '\n')
	fp.close()