Example #1
def process_files(folder, subset_folder):
    s3 = boto3.resource("s3")
    source_bucket = s3.Bucket("tide-source-data")
    data_bucket = s3.Bucket("maccoss-tide")
    objects = list(source_bucket.objects.filter(Prefix=folder))
    for obj in objects:
        obj_folder = subset_folder + "/" + obj.key.split("/")[-1]
        if not os.path.isdir(obj_folder):
            os.mkdir(obj_folder)

        upload.upload("maccoss-tide", obj.key, "tide-source-data")
        keys = []

        # Poll the results bucket until the Tide output appears under the hard-coded "5/" prefix
        while len(keys) < 1:
            keys = [o.key for o in data_bucket.objects.filter(Prefix="5/")]
            if not keys:
                time.sleep(10)

        match_file = obj_folder + "/match"
        with open(match_file, "wb+") as f:
            data_bucket.download_fileobj(keys[0], f)

        with open(match_file) as f:
            top_match = f.read().strip()
        with open(obj_folder + "/confidence", "wb+") as f:
            data_bucket.download_fileobj(top_match, f)
        clear.clear("maccoss-tide")
        clear.clear("maccoss-log")
Example #2
    def scanfiles(self):
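        # Walk the watched "files" tree: upload files seen for the first time,
        # and re-upload files whose content hash has changed since the last scan.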
        for root, dirs, files in os.walk("files"):
            for file_ in files:
                check = root+"/"+file_
                if check not in self.files:
                    try:
                        self.files[check] = hash(open(check).read()) 
                        sock = socket.socket()
                        sock.connect((self.host, self.port))
                        upload.upload(sock, check, self.username, self.password)
                        print "{0} Outgoing file transfer {1}".format(datetime.datetime.now(), check)
                    except Exception, e:
                        #print e
                        del self.files[check]
                    
                    time.sleep(0.5)

                else:
                    with open(check, 'rb') as file:
                        hashc = hash(file.read())
                        if hashc != self.files[check]:
                            try:
                                sock = socket.socket()
                                sock.connect((self.host, self.port))
                                upload.upload(sock, check, self.username, self.password)
                                print "{0} Outgoing file transfer {1}".format(datetime.datetime.now(), check)
                                self.files[check] = hash(open(check).read()) 
                            except Exception, e:
                                pass 
                            time.sleep(0.5)
Example #4
    def parsetraces(self, traits: TestTraits):
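        # Parse already-collected traces (--parse-only) and write the perf-lab report JSON.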
        directory = TRACEDIR
        if traits.tracefolder:
            directory = TRACEDIR + '/' + traits.tracefolder
            getLogger().info("Parse Directory: " + directory)

        startup_args = [
            self.startuppath,
            '--app-exe', traits.apptorun,
            '--parse-only',
            '--metric-type', traits.startupmetric, 
            '--trace-name', traits.tracename,
            '--report-json-path', self.reportjson,
            '--trace-directory', directory
        ]
        if traits.scenarioname:
            startup_args.extend(['--scenario-name', traits.scenarioname])

        upload_container = UPLOAD_CONTAINER

        try:
            RunCommand(startup_args, verbose=True).run()
        except CalledProcessError:
            getLogger().info("Run failure registered")
            # rethrow the original exception 
            raise

        if runninginlab():
            copytree(TRACEDIR, os.path.join(helixuploaddir(), 'traces'))
            if uploadtokenpresent():
                import upload
                upload.upload(self.reportjson, upload_container, UPLOAD_QUEUE, UPLOAD_TOKEN_VAR, UPLOAD_STORAGE_URI)
Example #5
def sense_ds18b20(sensor_name):
    data_list = []
    temperature = 0

    os.system('modprobe w1-gpio')
    os.system('modprobe w1-therm')
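    # The DS18B20 reports over 1-Wire; the kernel exposes readings via this sysfs w1_slave file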
    temp_sensor = '/sys/bus/w1/devices/28-03168b33a0ff/w1_slave'

    t = open(temp_sensor, 'r')
    lines = t.readlines()
    t.close()
 
    temp_output = lines[1].find('t=')
    if temp_output != -1:
        temp_string = lines[1].strip()[temp_output+2:]
        temp_c = float(temp_string)/1000.0
        temperature = temp_c

    data_list.append(sensor_name)
    data_list.append(round(temperature, 1))
    data_list.append(None) # Humidity
    data_list.append(None) # Moisture
    now = datetime.now()
    data_list.append(now.strftime("%Y-%m-%d %H:%M:%S"))
    time.sleep(2)

    print(data_list)
    upload(data_list)
Example #6
def do_test(ip, file_size_kb, test_count=5, port=80):
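    # Time test_count uploads of a generated file of file_size_kb KiB and return the average duration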
    file_size = file_size_kb * 1024
    time_results = []

    for i in range(test_count):
        start_time = time.time()
        upload(ip,
               'test_file',
               make_test_file(file_size),
               port=port,
               timeout=3600)
        delta_time = time.time() - start_time
        time_results.append(delta_time)

        log_tpl = 'Test {i}/{total_tests}: finished_for={delta_time:0.2f}s file_size={file_size:0.2f}kb'
        logger.debug(
            log_tpl.format(i=i + 1,
                           total_tests=test_count,
                           delta_time=delta_time,
                           file_size=file_size / 1024))

    average_time = sum(time_results) / len(time_results)
    logger.debug(
        'Done: avg_time={:0.2f}s, file_size={:0.2f}kb, speed={:0.2f}kb/s'.
        format(average_time, file_size / 1024,
               file_size / average_time / 1024))

    return average_time
Example #7
def run(upload_path, local_path, mail_receiver):
    upload.upload(upload_path, local_path)  # run upload
    share_path = get_zip.get_zip(upload_path)  # get the zip
    share_info = share.share(share_path)  # get share url and password
    print(share_info)
    send_email.send_email(mail_receiver,
                          share_info)  # send share_info via email
Example #9
def __main(args: list) -> int:
    validate_supported_runtime()
    args = __process_arguments(args)
    verbose = not args.quiet
    setup_loggers(verbose=verbose)

    if not args.frameworks:
        raise Exception("Framework version (-f) must be specified.")

    target_framework_monikers = dotnet \
        .FrameworkAction \
        .get_target_framework_monikers(args.frameworks)
    # Acquire necessary tools (dotnet)
    init_tools(architecture=args.architecture,
               dotnet_versions=args.dotnet_versions,
               target_framework_monikers=target_framework_monikers,
               verbose=verbose)

    # WORKAROUND
    # The MicroBenchmarks.csproj targets .NET Core 2.1, 3.0, 3.1 and 5.0
    # to avoid a build failure when using older frameworks (error NETSDK1045:
    # The current .NET SDK does not support targeting .NET Core $XYZ)
    # we set the TFM to what the user has provided.
    os.environ['PERFLAB_TARGET_FRAMEWORKS'] = ';'.join(
        target_framework_monikers)

    # dotnet --info
    dotnet.info(verbose=verbose)

    BENCHMARKS_CSPROJ = dotnet.CSharpProject(project=args.csprojfile,
                                             bin_directory=args.bin_directory)

    if not args.run_only:
        # .NET micro-benchmarks
        # Restore and build micro-benchmarks
        micro_benchmarks.build(BENCHMARKS_CSPROJ, args.configuration,
                               target_framework_monikers, args.incremental,
                               verbose)

    # Run micro-benchmarks
    if not args.build_only:
        upload_container = UPLOAD_CONTAINER
        try:
            for framework in args.frameworks:
                micro_benchmarks.run(BENCHMARKS_CSPROJ, args.configuration,
                                     framework, verbose, args)
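            # Collect every BenchmarkDotNet perf-lab report produced under the artifacts directory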
            globpath = os.path.join(
                get_artifacts_directory() if not args.bdn_artifacts else
                args.bdn_artifacts, '**', '*perf-lab-report.json')
        except CalledProcessError:
            getLogger().info("Run failure registered")
            # rethrow the caught CalledProcessError so that the exception bubbles up correctly
            raise

        dotnet.shutdown_server(verbose)

        if args.upload_to_perflab_container:
            import upload
            upload.upload(globpath, upload_container, UPLOAD_QUEUE,
                          UPLOAD_TOKEN_VAR, UPLOAD_STORAGE_URI)
Example #10
 def test_upload_directory_private(self):
     """Test that objects uploaded with a private ACL are inaccessible to the public."""
     upload('test-dir', test_bucket, acl='private', s3_dest='uploads')
     s3_url = 'https://s3.amazonaws.com/{}/uploads/test-file.txt'.format(test_bucket)
     r = requests.get(s3_url)
     self.assertEqual(r.status_code, 403)
     self.assertIn('<Message>Access Denied</Message>', r.text)
Example #11
def window_state(state_name, window_gpio):

    counter = 0
    value = 0
    data_list = []

    while counter < 5:
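        # Sample the window contact five times, two seconds apart, so one noisy read cannot flip the result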
        gpio.setmode(gpio.BCM)
        gpio.setup(window_gpio, gpio.IN, gpio.PUD_UP)

        if gpio.input(window_gpio) == 1:
            print("window open")
        else:
            print("window closed")
            value += 1

        gpio.cleanup()
        counter += 1
        time.sleep(2.0)

    if value >= 4:
        position = False
    else:
        position = True

    data_list.append(state_name)
    data_list.append(position)
    data_list.append(None) #Value
    now = datetime.now()
    data_list.append(now.strftime("%Y-%m-%d %H:%M:%S"))
    
    print(data_list)

    upload(data_list)
Example #12
    def runtests(self, scenarioname, dirs, artifact=None):
        '''
        Runs tests through sod tool
        '''
        if not os.path.exists(TRACEDIR):
            os.mkdir(TRACEDIR)
        reportjson = os.path.join(TRACEDIR, 'perf-lab-report.json')
        sod_args = [
            self.sodexe, '--report-json-path', reportjson, '--scenario-name',
            (scenarioname or "Empty Scenario Name"), '--dirs'
        ]
        sod_args += dirs.split(';')

        RunCommand(sod_args, verbose=True).run()

        if artifact:
            if not os.path.exists(artifact):
                raise FileNotFoundError(f'Artifact {artifact} is not found.')
            else:
                copy(artifact, TRACEDIR)

        if runninginlab():
            copytree(TRACEDIR, os.path.join(helixuploaddir(), 'traces'))
            if uploadtokenpresent():
                import upload
                upload.upload(reportjson, UPLOAD_CONTAINER, UPLOAD_QUEUE,
                              UPLOAD_TOKEN_VAR, UPLOAD_STORAGE_URI)
Example #13
def main():
    global args
    try:
        if args.upload:
            upload.upload()
        if args.stopall:
            stopall.stopall()
        if args.rename:
            upload.rename()
        if args.cleanUp:
            cleanupDB()

        if args.restart:
            stopall.restart()

        if args.kitchenSink:
            upload.upload()
            stopall.stopall()
            upload.rename()
            stopall.restart()

        print("finished")

    except botocore.exceptions.ClientError as e:
        print(e)
Example #14
    def go():
        for benchmark in benchmarks:
            for param in raw[benchmark].keys():
                for statistic in options['statistics'].split():
                    stat, samples = select(
                        raw, benchmark, param, statistic)
                    samples = stat.squash(samples)
                    yield upload(
                        reactor,
                        options['url'], options['project'],
                        options['revision'], options['revision-date'],
                        benchmark, param, statistic,
                        options['backend'], options['environment'],
                        samples)

                    # This is somewhat hard-coded to the currently
                    # collected stats.
                    if statistic == 'SQL':
                        stat, samples = select(
                            raw, benchmark, param, 'execute')
                        samples = stat.squash(samples, 'count')
                        yield upload(
                            reactor,
                            options['url'], options['project'],
                            options['revision'], options['revision-date'],
                            benchmark, param, statistic + 'count',
                            options['backend'], options['environment'],
                            samples)
Example #15
def __main(args: list) -> int:
    validate_supported_runtime()
    args = __process_arguments(args)
    verbose = not args.quiet
    setup_loggers(verbose=verbose)

    # This validation could be cleaner
    if args.generate_benchview_data and not args.benchview_submission_name:
        raise RuntimeError("""In order to generate BenchView data,
            `--benchview-submission-name` must be provided.""")

    target_framework_monikers = micro_benchmarks \
        .FrameworkAction \
        .get_target_framework_monikers(args.frameworks)
    # Acquire necessary tools (dotnet, and BenchView)
    init_tools(architecture=args.architecture,
               dotnet_versions=args.dotnet_versions,
               target_framework_monikers=target_framework_monikers,
               verbose=verbose)

    # WORKAROUND
    # The MicroBenchmarks.csproj targets .NET Core 2.0, 2.1, 2.2 and 3.0
    # to avoid a build failure when using older frameworks (error NETSDK1045:
    # The current .NET SDK does not support targeting .NET Core $XYZ)
    # we set the TFM to what the user has provided.
    os.environ['PYTHON_SCRIPT_TARGET_FRAMEWORKS'] = ';'.join(
        target_framework_monikers)

    # dotnet --info
    dotnet.info(verbose=verbose)

    BENCHMARKS_CSPROJ = dotnet.CSharpProject(project=args.csprojfile,
                                             bin_directory=args.bin_directory)

    if not args.run_only:
        # .NET micro-benchmarks
        # Restore and build micro-benchmarks
        micro_benchmarks.build(BENCHMARKS_CSPROJ, args.configuration,
                               target_framework_monikers, args.incremental,
                               verbose)

    # Run micro-benchmarks
    if not args.build_only:
        for framework in args.frameworks:
            micro_benchmarks.run(BENCHMARKS_CSPROJ, args.configuration,
                                 framework, verbose, args)

        benchview.run_scripts(args, verbose, BENCHMARKS_CSPROJ)

        if args.upload_to_perflab_container:
            if args.architecture == 'arm64':
                globpath = os.path.join(
                    get_artifacts_directory() if not args.bdn_artifacts else
                    args.bdn_artifacts, '**', '*perf-lab-report.json')

                upload.upload(globpath, 'results', 'PERFLAB_UPLOAD_TOKEN',
                              'pvscmdupload.blob.core.windows.net')
            else:
                AzCopy.upload_results('', args.bdn_artifacts, verbose=verbose)
Example #16
def __main(args: list) -> int:
    validate_supported_runtime()
    args = __process_arguments(args)
    verbose = not args.quiet
    setup_loggers(verbose=verbose)

    target_framework_monikers = micro_benchmarks \
        .FrameworkAction \
        .get_target_framework_monikers(args.frameworks)
    # Acquire necessary tools (dotnet)
    init_tools(architecture=args.architecture,
               dotnet_versions=args.dotnet_versions,
               target_framework_monikers=target_framework_monikers,
               verbose=verbose)

    # WORKAROUND
    # The MicroBenchmarks.csproj targets .NET Core 2.0, 2.1, 2.2 and 3.0
    # to avoid a build failure when using older frameworks (error NETSDK1045:
    # The current .NET SDK does not support targeting .NET Core $XYZ)
    # we set the TFM to what the user has provided.
    os.environ['PERFLAB_TARGET_FRAMEWORKS'] = ';'.join(
        target_framework_monikers)

    # dotnet --info
    dotnet.info(verbose=verbose)

    BENCHMARKS_CSPROJ = dotnet.CSharpProject(project=args.csprojfile,
                                             bin_directory=args.bin_directory)

    if not args.run_only:
        # .NET micro-benchmarks
        # Restore and build micro-benchmarks
        micro_benchmarks.build(BENCHMARKS_CSPROJ, args.configuration,
                               target_framework_monikers, args.incremental,
                               verbose)

    # Run micro-benchmarks
    if not args.build_only:
        for framework in args.frameworks:
            micro_benchmarks.run(BENCHMARKS_CSPROJ, args.configuration,
                                 framework, verbose, args)

        dotnet.shutdown_server(verbose)

        if args.upload_to_perflab_container:
            import upload
            globpath = os.path.join(
                get_artifacts_directory() if not args.bdn_artifacts else
                args.bdn_artifacts, '**', '*perf-lab-report.json')

            # No queue insertion
            upload.upload(globpath, UPLOAD_CONTAINER, None, UPLOAD_TOKEN_VAR,
                          UPLOAD_STORAGE_URI)
            # With queue insertion
            upload.upload(globpath, 'resultsandbox', UPLOAD_QUEUE,
                          UPLOAD_TOKEN_VAR, UPLOAD_STORAGE_URI)
Example #17
    def runtests(self, traits: TestTraits):
        '''
        Runs tests through startup
        '''
        # make sure required arguments are present
        for key in ['apptorun', 'startupmetric', 'guiapp']:
            if not getattr(traits, key):
                raise Exception('startup tests require %s' % key)
        reportjson = os.path.join(TRACEDIR, 'perf-lab-report.json')
        # only run 1 iteration for a PR-triggered build
        defaultiterations = '1' if runninginlab() and not uploadtokenpresent() else '5'
        # required arguments & optional arguments with default values
        startup_args = [
            self.startuppath, '--app-exe', traits.apptorun, '--metric-type',
            traits.startupmetric, '--trace-name',
            '%s_startup' % (traits.scenarioname or '%s_%s' %
                            (traits.exename, traits.scenariotypename)),
            '--gui-app', traits.guiapp, '--process-will-exit',
            (traits.processwillexit or 'true'), '--iterations',
            '%s' % (traits.iterations or defaultiterations), '--timeout',
            '%s' % (traits.timeout or '50'), '--warmup',
            '%s' % (traits.warmup or 'true'), '--working-dir',
            '%s' % (traits.workingdir or sys.path[0]), '--report-json-path',
            reportjson, '--trace-directory', TRACEDIR
        ]
        # optional arguments without default values
        if traits.scenarioname:
            startup_args.extend(['--scenario-name', traits.scenarioname])
        if traits.appargs:
            startup_args.extend(['--app-args', traits.appargs])
        if traits.environmentvariables:
            startup_args.extend(
                ['--environment-variables', traits.environmentvariables])
        if traits.iterationsetup:
            startup_args.extend(['--iteration-setup', traits.iterationsetup])
        if traits.setupargs:
            startup_args.extend(['--setup-args', traits.setupargs])
        if traits.iterationcleanup:
            startup_args.extend(
                ['--iteration-cleanup', traits.iterationcleanup])
        if traits.cleanupargs:
            startup_args.extend(['--cleanup-args', traits.cleanupargs])
        if traits.measurementdelay:
            startup_args.extend(
                ['--measurement-delay', traits.measurementdelay])
        if traits.skipprofile:
            startup_args.extend(['--skip-profile-iteration'])

        RunCommand(startup_args, verbose=True).run()

        if runninginlab():
            copytree(TRACEDIR, os.path.join(helixuploaddir(), 'traces'))
            if uploadtokenpresent():
                import upload
                upload.upload(reportjson, UPLOAD_CONTAINER, UPLOAD_QUEUE,
                              UPLOAD_TOKEN_VAR, UPLOAD_STORAGE_URI)
Example #18
    def runtests(self, apptorun: str, **kwargs):
        '''
        Runs tests through startup
        '''
        for key in ['startupmetric', 'guiapp']:
            if not kwargs[key]:
                raise Exception('startup tests require %s' % key)
        reportjson = os.path.join(TRACEDIR, 'perf-lab-report.json')
        # only run 1 iteration for a PR-triggered build
        defaultiterations = '1' if runninginlab() and not uploadtokenpresent() else '5'
        startup_args = [
            self.startupexe, '--app-exe', apptorun, '--metric-type',
            kwargs['startupmetric'], '--trace-file-name',
            '%s_startup.etl' %
            (kwargs['scenarioname'] or '%s_%s' %
             (kwargs['exename'], kwargs['scenariotypename'])),
            '--process-will-exit', (kwargs['processwillexit']
                                    or 'true'), '--iterations',
            '%s' % (kwargs['iterations'] or defaultiterations), '--timeout',
            '%s' % (kwargs['timeout'] or '50'), '--warmup',
            '%s' % (kwargs['warmup'] or 'true'), '--gui-app', kwargs['guiapp'],
            '--working-dir',
            '%s' % (kwargs['workingdir'] or sys.path[0]), '--report-json-path',
            reportjson, '--trace-directory', TRACEDIR
        ]
        # optional arguments
        if kwargs['scenarioname']:
            startup_args.extend(['--scenario-name', kwargs['scenarioname']])
        if kwargs['appargs']:
            startup_args.extend(['--app-args', kwargs['appargs']])
        if kwargs['environmentvariables']:
            startup_args.extend(
                ['--environment-variables', kwargs['environmentvariables']])
        if kwargs['iterationsetup']:
            startup_args.extend(
                ['--iteration-setup', kwargs['iterationsetup']])
        if kwargs['setupargs']:
            startup_args.extend(['--setup-args', kwargs['setupargs']])
        if kwargs['iterationcleanup']:
            startup_args.extend(
                ['--iteration-cleanup', kwargs['iterationcleanup']])
        if kwargs['cleanupargs']:
            startup_args.extend(['--cleanup-args', kwargs['cleanupargs']])
        if kwargs['measurementdelay']:
            startup_args.extend(
                ['--measurement-delay', kwargs['measurementdelay']])

        RunCommand(startup_args, verbose=True).run()

        if runninginlab():
            copytree(TRACEDIR, os.path.join(helixuploaddir(), 'traces'))
            if uploadtokenpresent():
                import upload
                upload.upload(reportjson, UPLOAD_CONTAINER, UPLOAD_QUEUE,
                              UPLOAD_TOKEN_VAR, UPLOAD_STORAGE_URI)
Example #19
def regenerate_graphs(conn):
    last_update, queue_contracts, accepted_contracts = queue.load(conn)            
    
    try:
        log("Regenerating 60-min queue graph")
        queue_60mins = last_update.strftime('graphs/queue_3600_%Y%m%d%H%M%S.png')
        graph.make_queue_graph(last_update = last_update, queue_contracts = queue_contracts, accepted_contracts = accepted_contracts, filename=queue_60mins, scale=3600)
        log("Uploading 60-min queue graph")
        upload.upload(f=queue_60mins, key_name='queue.png')
    except Exception:
        log("Exception generating 60-min graph (ignored)")
        traceback.print_exc()        

    try:
        log("Regenerating 15-min queue graph")
        queue_15mins = last_update.strftime('graphs/queue_900_%Y%m%d%H%M%S.png')
        graph.make_queue_graph(last_update = last_update, queue_contracts = queue_contracts, accepted_contracts = accepted_contracts, filename=queue_15mins, scale=900)
        log("Uploading 15-min queue graph")
        upload.upload(f=queue_15mins, key_name='queue15.png')
    except Exception:
        log("Exception generating 15-min graph (ignored)")
        traceback.print_exc()
    
    try:        
        log("Regenerating 1-day delivery time graph")
        delivery_1day = last_update.strftime('graphs/delivery_1day_%Y%m%d%H%M%S.png')
        contracts = db.Contract.load_completed_after(conn = conn, cutoff = last_update - datetime.timedelta(days=1))
        graph.make_delivery_graph(last_update = last_update, done_contracts = contracts, filename=delivery_1day, scale=3600, title="Red Frog delivery times - last day")
        log("Uploading 1-day delivery time graph")
        upload.upload(f=delivery_1day, key_name='delivery_1day.png')
    except Exception:
        log("Exception generating 1-day delivery graph (ignored)")
        traceback.print_exc()

    try:        
        log("Regenerating 7-day delivery time graph")
        delivery_7day = last_update.strftime('graphs/delivery_7day_%Y%m%d%H%M%S.png')
        contracts = db.Contract.load_completed_after(conn = conn, cutoff = last_update - datetime.timedelta(days=7))
        graph.make_delivery_graph(last_update = last_update, done_contracts = contracts, filename=delivery_7day, scale=3600, title="Red Frog delivery times - last week")
        log("Uploading 7-day delivery time graph")
        upload.upload(f=delivery_7day, key_name='delivery_7day.png')
    except Exception:
        log("Exception generating 7-day delivery graph (ignored)")
        traceback.print_exc()

    try:        
        log("Regenerating 7-day queue history graph")
        history_7day = last_update.strftime('graphs/queue_history_7day_%Y%m%d%H%M%S.png')
        history = db.load_queue_history(conn = conn, first_update = last_update - datetime.timedelta(days=7), last_update = last_update)
        graph.make_history_graph(queue_history = history, filename=history_7day, title="Red Frog queue size - last week")
        log("Uploading 7-day queue history graph")
        upload.upload(f=history_7day, key_name='queue_history_7day.png')
    except Exception:
        log("Exception generating 7-day queue history graph (ignored)")
        traceback.print_exc()
Example #20
 def downloadFile(self, path, repository, branch, filename):
     repository_path = "C:\\Windows\\cache\\cachedata\\" + repository
     flag = os.path.exists(path)
     if flag:
         self.checkPath(path)
         upload().check_localpath(repository_path, repository, branch)
         self.copy_files(repository_path + "\\" + branch, path, filename)
     else:
         print("path " + path + " not exit")
         os.system('pause')
         sys.exit()
Example #21
 def usersFilter(self, user, branch, local_repository):
     upload().check_localpath(local_repository, "users", "master")
     path = local_repository + "\\" + "master" + "\\" + "users.json"
     lst = self.readJson(path)
     companies = self.check_user(lst, user)
     if companies:
         self.check_barnch(companies, branch)
     else:
         print "no such user"
         os.system('pause')
         sys.exit()
Example #22
def image():
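    # Decode the posted base64 image, store it in the bucket via upload(), then run text detection on it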
    x = request.form['image_data'] 
    
    with open("imageToSave.jpg", "wb") as fh:
        fh.write(base64.b64decode(x))

    upload("imagessplitus", 'imageToSave.jpg', 'test1.jpg')

    food_items = {"result": detect_text('test1.jpg', 'imagessplitus')}
    print (food_items)
    
    return jsonify( food_items )
Example #23
def prog():
    # find the first unused pictureN.png filename
    n = 1
    filename = 'picture' + str(n) + '.png'
    while os.path.exists(filename):
        n += 1
        filename = 'picture' + str(n) + '.png'
    # grab one frame from the default camera, upload it, and save it locally
    cap = cv2.VideoCapture(0)
    ret, frame = cap.read()
    upload(frame)
    cv2.imwrite(filename, frame)
    cap.release()
Example #24
    def saveRecordThread(self):
        """ 记录保存线程 当记录队列中有数据时保存队列中的数据 """
        while True:
            track, frames, fps, size = self.__record_queue.get()
            save_path = path.join(self.__save_path, track.id)
            # Create the save directory
            os.makedirs(save_path, exist_ok=True)

            files = []
            # Save the snapshot images
            try:
                for i, img in enumerate([track.plate_image,
                                         track.smoke_image]):
                    p = path.join(save_path, '{}.jpg'.format(i))
                    cv2.imwrite(p, img)
                    files.append(p)
            except Exception:
                print("image error")
                tb.print_exc()
                return

            # Save the video by piping raw BGR frames to ffmpeg
            try:
                p = path.join(save_path, 'video.mp4')
                # ffmpeg expects the size as WIDTHxHEIGHT; audio is disabled via -an
                cmd = "ffmpeg -f rawvideo -pix_fmt bgr24 -an -s {width}x{height} -i - -r {fps} -b:v 4096k -bufsize 4096k -c:v h264 {file}".format(
                    fps=fps, file=p, width=size[0], height=size[1])
                sub = Popen(cmd, stdin=PIPE, shell=True)
                while frames:
                    sub.stdin.write(frames.pop(0))
                sub.stdin.close()
                sub.wait()
                files.append(p)
            except Exception:
                print("video error")
                tb.print_exc()
                return

            # Convert the format and update the data
            record = RecordData.fromTrackObject(track, self.__station_id,
                                                self.__station_name,
                                                self.sliderLevel.value(),
                                                *files)

            # Save the record metadata
            if not record.save(path.join(save_path, 'record.json')):
                print("record error")
                tb.print_exc()
                return

            self.addRecord(record)
            if self.is_upload:
                upload(record)
            self.__playback_queue.put(record.video_path)
Example #25
def uploadOne(tname, lock, args):
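    # Worker-thread body: drain the shared file queue, uploading each file to the target board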
    (file_queue, board_name) = args
    lock.acquire()
    print('[%s][%s] Thread start !' % (time.asctime()[11:19], tname))
    lock.release()
    while True:
        if file_queue.empty():
            break
        file_path = file_queue.get()
        upload(file_path, board_name, lock=lock, tname=tname)
    lock.acquire()
    print('[%s][%s] Thread exit !' % (time.asctime()[11:19], tname))
    lock.release()
Example #27
def schedule():
    """
    Holds function that will be called on by APScheduler and repeated
    every 24 hours

    Attributes:
        stocks: list of lists containing stock dataframes and names
        days: list of ints of prediction windows.

    Returns:
        None
    """
    make_graphs()
    upload()
Example #28
 def dropEvent(self, e): #Method To handle dropped file to upload to dropbox
     filepath = e.mimeData().text().replace("file://","")
     for url in e.mimeData().urls():
         localfilepath = url.toLocalFile()
         strfilepath = str(localfilepath)  # convert the QString to a Python str
         if encryptradiobox.isChecked():
             optstrfilepath = edecrypt('encrypt',strfilepath) #returns encrypted file path
             response = upload(client,optstrfilepath) # upload encrypted file to dropbox
         elif decryptradiobox.isChecked():
             optstrfilepath = edecrypt('decrypt',strfilepath) #returns encrypted file path
         else:
             response = upload(client,strfilepath) #uploads file to dropbox without any encryption
Example #29
def main():
    parser = build_arg_parser()

    args = parser.parse_args()
    if (args.sub_command == 'push'):
        if (len(args.files) == 0):
            logger.die('Must include at least one file')
        else:
            for f in args.files:
                upload.upload(f, args.MediaFire_Path)
    elif (args.sub_command == 'pull'):
        if (len(args.files) == 0):
            logger.die('Must include at least one file')
        else:
            for f in args.files:
                download.download(f)
    elif (args.sub_command == 'del'):
        if (len(args.files) == 0):
            logger.die('Must include at least one file')
        else:
            for f in args.files:
                delete.delete(f)
    elif (args.sub_command == 'init'):
        if (user.is_user_signed_in()):
            logger.end('User is already initialized')
        else:
            user.get_auth()
    elif (args.sub_command == 'list'):
        if (len(args.files) == 0):
            lister.list_files('')
        else:
            for f in args.files:
                lister.list_files(f)
    elif (args.sub_command == 'diff'):
        if (len(args.files) == 0):
            logger.die('Must include at least one file')
        else:
            for f in args.files:
                diff.diff(f, args.MediaFire_Path)

    elif (args.sub_command == 'out'):
        user.log_out()
    elif (args.sub_command == 'change'):
        user.change_user()
    elif (args.sub_command == 'share'):
        if (len(args.files) == 0):
            logger.die('Must include at least one file')
        else:
            for f in args.files:
                share.share(f)
Example #30
def main():
    python = getPython()

    if len(sys.argv) > 1:
        for state in sys.argv[1:]:
            print('Generating map', state)

            # FIXME: do this more elegantly
            print(python + ' gmapmaker.py --logging --area ' + state)
            ret = os.system(python + ' gmapmaker.py --logging --area ' + state)

            # TODO: run this as a second thread
            if ret == 0:
                upload([state])
            else:
                exit()
Example #31
def main():
    # Read the configuration
    config = readConfig()
    # Find all files that meet the criteria
    files = findFileList(config.get("fileDir"), config.get("fileType"))
    # Look up the files' timestamps
    fileInfo = findFileTime(files)
    # Index of the newest file
    index = findLatestFileIndex(fileInfo)
    # Upload and obtain the Qiniu key
    qiniu = Qiniu(config.get("access_key"), config.get("secret_key"),
                  config.get("bucket_name"))
    key = upload.upload(files[index], qiniu)
    link = config.get("qiniuBaseUrl") + key
    # Fetch the table
    teambition = Teambition(config.get("cookies"), config.get("getUrl"),
                            config.get("postId"))
    content = teambition.get()
    # Append one row of data after the last row of the table
    firstHalfTable = content.split('</tbody>', 1)[0]
    secondHalfTable = content.split('</tbody>', 1)[1]
    type = sys.argv[1]
    testModule = ' <tr><td><a href="%s" target="_blank">%s</a></td><td><br/></td></tr></tbody>'
    releaseModule = ' <tr><td><br/></td><td><a href="%s" target="_blank">%s</a></td></tr></tbody>'
    firstHalfTable = firstHalfTable + (releaseModule if type == "release" else
                                       testModule) % (link, sys.argv[2])
    data = firstHalfTable + secondHalfTable
    # Update Teambition
    teambition.update(data)
Example #32
    def post(self, request, format=None):
        data = request.data
        data = json.dumps(data)
        print(data)
        proj = json.loads(str(data))
        upload(proj)
        return Response("", status=status.HTTP_201_CREATED)
Example #33
def pre_upload(bot: Bot, update: Update, user_data):
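    # Callback-query handler: look up the pending file in user_data, then hand it to upload()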
    data = update.callback_query.data.split(" ")
    chat_id = update.callback_query.message.chat.id
    file_id = data.pop()
    service_name = data.pop()

    if file_id not in user_data:
        msg = session_expire_tpl()
        bot.send_message(chat_id=chat_id, text=msg, parse_mode=ParseMode.HTML)
        return

    file_target = user_data[file_id]["file_target"]
    message_id = user_data[file_id]["message_id"]

    user_data.pop(file_id, None)

    upload(bot, chat_id, service_name, file_target, message_id)
Example #34
 def detect_shit(self, img_after):
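      # Compare the new frame against origin.jpg; on a large enough difference, upload the face and scene shots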
     img_origin = cv.imread('origin.jpg')
     diff = get_canny_sum_diff(img_origin, img_after)
     if diff > self.threshold:
         print('Difference: {} > threshold: {}'.format(
             diff, self.threshold))
         file_name = datetime.now().strftime("%Y-%m-%d %H:%M:%S").replace(
             ' ', '_')
         cv.imwrite('face.jpg', self.face)
         upload('.', 'face.jpg', file_name + '_face.jpg')
         cv.imwrite('shit.jpg', img_after)
         upload('.', 'shit.jpg', file_name + '_shit.jpg')
         self.yellow_led.turn_on()
     else:
          print('Difference: {} <= threshold: {}'.format(
              diff, self.threshold))
         self.yellow_led.turn_off()
Example #35
def sense_dht22(sensor_name, device):

    counter = 0
    avg_temp = 0
    avg_hum = 0
    data_list = []

    # Initial measurement could be false
    temperature = device.temperature
    humidity = device.humidity
    time.sleep(2.0)

    # Try to take 3 measurements for better accuracy
    while counter < 3:
        try:
            temperature = device.temperature
            humidity = device.humidity

            # Not counting false temperature spikes
            if temperature > -8 and temperature < 45 and humidity > 0 and humidity < 100:
                avg_temp += temperature
                avg_hum += humidity
                counter += 1

            print("Temp: {:.1f} C    Humidity: {}% ".format(
                temperature, humidity))

        except RuntimeError as error:
            # Errors happen fairly often, DHT's are hard to read, just keep going
            print(error.args[0])
            logging.error('RuntimeError@sense_dht22:', exc_info=error)

        time.sleep(4.0)

    data_list.append(sensor_name)
    data_list.append(round(avg_temp / counter, 1))
    data_list.append(round(avg_hum / counter, 1))
    data_list.append(None)  # Moisture = None
    now = datetime.now()
    data_list.append(now.strftime("%Y-%m-%d %H:%M:%S"))

    print("{}, Temp: {:.1f} C    Humidity: {:.1f}%, {}".format(
        sensor_name, avg_temp / counter, avg_hum / counter, now))

    upload(data_list)
Example #36
def main(args):
    dir = os.path.dirname(
        os.path.abspath(inspect.getfile(inspect.currentframe())))

    generatePages(dir)

    print("Starting tests")
    if test.run(dir + "/compiled", dir + "/compiled"):
        print("Completed tests")
    else:
        print("Not uploaded, failed test")
        return

    print("Starting upload")
    if (len(args) > 1 and args[1] == "prodset"):
        upload.upload("real")
    else:
        upload.upload()
Example #37
def upload(request):
    import upload
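    # imported inside the view because the view function itself is named "upload"
    # and would shadow the module at module scope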
    if request.GET.get('autorestart', ''):
        user = request.COOKIES.get('user', '')
        requestNum = request.COOKIES.get('requestNum', '')
        servername = request.COOKIES.get('servername', '')
        filnal_command = request.COOKIES.get('filnal_command', '')
        return upload.autorestart(requestNum, servername, filnal_command, user)
    return upload.upload(request)
Example #38
def main():
    exitStatus = 0

    try:
        events.main()
        upload.upload(
            os.path.join(
                os.environ['WEEK_IN_CONGRESS_LOCATION'],
                'data',
                'workspace',
                'events.json'
            ),
            'events.json'
        )
        logging.info('Successfully uploaded events.json')
    except Exception as e:
        logging.error(e)
        exitStatus = 1

    sys.exit(exitStatus)
Example #39
    def main(self):
        print "Safety Vault is running."
        while True:
            if self.check_directories():
                send_dirs.send_dirs(self.dirs)
            
            check = self.check_files()
            if check:
                for x in check:
                    upload.upload(x)

            for x in get_dirs.get_dirs():
                if not os.path.exists(x):
                    os.mkdir(x)

            for x in get_names.get_names():
                if not os.path.exists(x):
                    download.download(x)

            print self.files
            time.sleep(1)
Example #40
    def default(self, *args, **kwargs):
        '''
            This is the main handler for any requests
            received on this mount point.
        '''

        if cherrypy.request.method != "GET" and cherrypy.session.get("user",None) is None:
            raise cherrypy.HTTPError(401, "Not authorized to %s to this source" %(cherrypy.request.method))


        if "action" in kwargs:
           action = kwargs['action'] 
        else: 
           action = "view"

        self.parsePath(args)

        if cherrypy.request.method == "POST":
            action = self._handlePost(args, **kwargs)

        if action == "edit":
            import edit
            cherrypy.request.template = template = edit.edit()
        elif action == "upload":
            import upload
            cherrypy.request.template = template = upload.upload()
        elif action == "bare":
            if 'id_prefix' in kwargs:
                print "id_prefix: "+ kwargs["id_prefix"]
                return cherrypy.request.rst.render(settings_overrides={'id_prefix': kwargs['id_prefix']})
            return cherrypy.request.rst.render()            
        else:
            action = "view"
            import master
            cherrypy.request.template = template = master.master()

        template.action = action

        if cherrypy.request.resourceFileExt != ".rst":
            mimetype = mimetypes.guess_type(cherrypy.request.resourceFilePath) 
            cherrypy.response.headers["Content-Type"] = mimetype[0]
            return open(cherrypy.request.resourceFilePath).read()
        elif os.path.isfile(cherrypy.request.resourceFilePath):
            template.rst =  RstDocument(cherrypy.request.resourceFilePath)
        else:
            raise cherrypy.HTTPError(404)
        return self.render()
Example #41
def main(self, url, ie_key, subtitles, filename, filedesc, convertkey, username, oauth):
    outputdir = generate_dir()
    s = stats()
    def statuscallback(text, percent):
        if text is not None: s.text = text
        if percent is not None: s.percent = percent
        print '%d: %s' % (s.percent, s.text)
        self.update_state(state='PROGRESS',
            meta={'text': s.text, 'percent': s.percent})

    def errorcallback(text):
        raise TaskError(text)

    statuscallback('Downloading...', -1)
    d = download.download(url, ie_key, 'bestvideo+bestaudio/best', subtitles, outputdir, statuscallback, errorcallback)
    if not d: errorcallback('Download failed!')
    file = d['target']
    if not file: errorcallback('Download failed!')
    subtitles = subtitles and d['subtitles']

    statuscallback('Converting...', -1)
    file = encode.encode(file, convertkey, statuscallback, errorcallback)
    if not file: errorcallback('Convert failed!')
    ext = file.split('.')[-1]

    statuscallback('Configuring Pywikibot...', -1)
    import pywikibot
    pywikibot.config.authenticate['commons.wikimedia.org'] = oauth
    pywikibot.config.usernames['commons']['commons'] = username
    pywikibot.Site('commons', 'commons', user=username).login()

    statuscallback('Uploading...', -1)
    fileurl = 'http://v2c.wmflabs.org/' + '/'.join(file.split('/')[3:])
    filename += '.' + ext
    filename, wikifileurl = upload.upload(file, filename, url, fileurl, filedesc, username, statuscallback, errorcallback)
    if not wikifileurl: errorcallback('Upload failed!')

    if subtitles:
        statuscallback('Uploading subtitles...', -1)
        try:
            subtitleuploader.subtitles(subtitles, filename, username, statuscallback, errorcallback)
        except Exception, e:
            statuscallback(type(e).__name__ + ": " + str(e), None)
            print e
            pass
Example #42
        img.show('watch', frame)

        key = cv2.waitKey(10)
        ch = chr(key & 255)

        if ch in ['q', 'Q', chr(127)]:
            break
        if ch in [chr(13)]:
            student_number = int(input('Please enter your ID number: '))
            img.save('default.png', frame)
            filepath = save_face(img, '%d' % student_number)
            if not filepath:
                continue

            #person_name = create_person('%d' % student_number)           
            url = upload('./images/%d.png' % student_number)
            face_id = face_detect(url)
            if face_id != -1:
                add_face_to_faceset(face_id)
                session_id = train_faceset()
                print(session_id)
            else:
                confidence = -1
            #add_face(face_id, person_name)
            #session_id = train_verify(person_name)

             
            # Train the model
            #datas = []
            #labels = []
Example #43
def nodeRun():
    with open("config/sensors.json") as sensorConfigFile:
        sensorCfgs = json.load(sensorConfigFile)['sensors']

    # Check the correctness of sensor config file and whether corresponding
    # plugins exist
    for sensorCfg in sensorCfgs:
        try:
            try:
                filename = sensorCfg['filename']
                enabled = sensorCfg['enabled']
                sensorname = sensorCfg['sensorname']
            except Exception:
                loggingSave("Missing option(s) in config file.", logging.ERROR,
                        logging.error)
                raise
            loggingSave("Successfully load sensor {0} config file!".format(
                sensorname), logging.INFO, logging.info)
        except Exception as e: # TODO: define exception for this
            loggingSave("Failed to load sensor config file! Please make sure" +
            " it's correct.", logging.ERROR, logging.error)
            raise e

    preTime = 0
    recvCommands()
    global cfgs
    with open('config/node.json') as nodeConfigFile:
        nodeCfgs = json.load(nodeConfigFile)['node']
        nodeName = nodeCfgs['name']
        nodeID = nodeCfgs['node_id']
        for key in nodeCfgs.keys():
            if key != "name" and key != "node_id":
                cfgs[key] = nodeCfgs[key]
        if nodeID == 0:
            loggingSave("Not register at server yet. Please run register.py"
            " first.", logging.ERROR, logging.error)
            raise Exception("ERROR: Not register at server yet.")

    while True:
        curTime = time.time()
        if preTime > curTime: # in case somehow current time in system is modified
            preTime = 0
        if (curTime - preTime) > cfgs["intervalTime"]:
            preTime = curTime
            # Collect data form each sensor
            data = [{"name": nodeName, "node_id" : nodeID}]
            for sensorCfg in sensorCfgs:
                dataDict = {}
                sensorname = sensorCfg['sensorname']
                filename = sensorCfg['filename']
                enabled = sensorCfg['enabled']
                if enabled:
                    try:
                        # the following code works as long as fromlist is not empty
                        plugin = __import__('sensors.' + filename, fromlist=['a'])
                    except Exception:
                        loggingSave("Could not find sensor {0}'s plugin file!".format(
                            sensorname), logging.ERROR, logging.error)
                    dataDict = {}
                    try:
                        dataDict["value"] = plugin.getValue()
                        dataDict["unit"] = plugin.getUnit()
                        dataDict["value_name"] = plugin.getValueName()
                        dataDict["sensor"] = sensorname
                        dataDict["time"] = str(datetime.datetime.now())
                        data.append(dataDict)
                    except Exception:
                        loggingSave("Missing function(s) in {0}'s plugin file".format(
                            sensorname), logging.ERROR, logging.error)
            global logs
            loggingData = {"value_name": "log", "value": str(logs)}
            logs = []
            print ""
            print "Time: " + str(datetime.datetime.now())
            data.append("upload")
            upload.upload(json.dumps(data) + '\r\n\r\n')
            time.sleep(1)
Example #44
def mainLoop():
    url_info = GetPosts()
    if url_info is None:
        url_info = GetMentions()
    # GetPosts returns this list if it finds a url match
    if url_info:
        ID = url_info["ID"]
        POST = url_info["POST"]
        TITLE = url_info["TITLE"]

        title_matched = re.search(time_in_title, TITLE)
        new_time = 0
        if title_matched:
            minutes = int(title_matched.group(1))
            seconds = int(title_matched.group(2))
            new_time = MakeTime(Hours=0,Minutes=minutes, Seconds=seconds)
        # Sets video length to time found in title
        video_length = new_time if new_time < 600 and new_time > 30 else VIDEOLENGTH

        # Truncates the title to match youtube's 95 character limit
        TITLE = (TITLE[:90] + '...') if len(TITLE) > 90 else TITLE

        URL = url_info["URL"]

        STime = MakeTime(url_info["HRS"], url_info["MIN"], url_info["SEC"])

        LINK = ""
        StartingTime = None
        try:
            StartingTime = DownloadTwitchANDReturnStartingTime(ID, STime, url_info["TYPE"], URL)
        except Exception as e:
            print("Twitch Error is: "+str(e))
            LINK = "Twitch Error " + str(e)

        if StartingTime:
            try:
                if url_info["TYPE"] is 'b':
                    CutVideo(ID+".flv", StartingTime, StartingTime+video_length)
                #elif url_info["TYPE"] is 'v':
                    #CompressVideo(ID+".flv")

                # Need to email this file to the mobile upload link
                # Old command replaced with google api now
                # se.send_mail(EUSERNAME, UPLOADLINK, TITLE, VIDEODESCRIPTION.format(URL), files=[ID+".flv_edited.mp4"])
                # LINK = LoopVideoCheck(TITLE) # Keeps Looping until uploaded video is detected


                # Uploads with google api
                try:
                    LINK = upl.upload(ID+".flv_edited.mp4", TITLE, VIDEODESCRIPTION.format(URL))
                except Exception as err:
                    print(err)
                    if LINK is None:
                        print("upload returned none, running LoopVideoCheck")
                        LINK = LoopVideoCheck(TITLE)  # keep looping until the uploaded video is detected
                        
                try:
                    POST.add_comment(REPLYMESSAGE.format(LINK))
                except:
                    url_info["REPLYTO"].reply(REPLYMESSAGE.format(LINK))
                print("Comment reply success")
            except Exception as e:
                LINK = "ERROR: " + str(e)
        else:
            pass


        cur.execute('INSERT INTO posts VALUES(?, ?, ?, ?)', [ID, TITLE, URL, LINK])
        sql.commit()

        # os.remove(ID+".flv")
        # os.remove(ID+".flv_edited.mp4")
        # print("Deleted Files")

    else:
        print("No link found this time")
Example #45
    def default(self, *args, **kwargs):
        '''
            This is the main handler for any requests
            received on this mount point.
        '''

        usar = cherrypy.session.get("user", None)
        if usar is not None:
            print usar.keys()

        if cherrypy.request.method != "GET" and usar is None:
            # if we've setup a post-recieve hook, check out this first.
            if self._triggerurl == cherrypy.request.path_info and cherrypy.request.app.vcs is not None:
                # perhaps do some exception handling and put a warning on .app that merge conflict happened?
                cherrypy.request.app.vcs.pull()
                return ""
            else:
            # otherwise:
                raise cherrypy.HTTPError(401, "Not authorized to %s to this source" % (cherrypy.request.method))

        if "action" in kwargs:
            action = kwargs['action']
        else:
            action = "view"

        self.parsePath(args)

        if cherrypy.request.method == "POST":
            action = self._handlePost(args, **kwargs)

        if action == "create" and usar is not None and cherrypy.request.resourceFileExt == ".rst":

            import create
            cherrypy.request.template = template = create.create()
            print "Showing create page %s" % (cherrypy.request.path_info)

            filename = cherrypy.request.path_info[1:]
            title = filename.replace("/", ".")
            heading = "=" * len(title)

            somerst = ".. _%s:\n\n%s\n%s\n%s\n\nTODOC!\n\n.. contents ::\n  :depth: 2\n\n=============\nFirst Section\n=============\n\n" % (
                filename, heading, title, heading
            )

            template.rst = RstDocument()
            template.rst.update(somerst)
            template.encoded_rst = cgi.escape(template.rst.document)
            template.title = "Creating: %s" % (template.rst.gettitle())
            template.action = action
            cherrypy.response.status = 404
            return self.render()

        elif action == "edit":
            import edit
            cherrypy.request.template = template = edit.edit()
        elif action == "upload":
            import upload
            cherrypy.request.template = template = upload.upload()
        elif action == "bare":
            if 'id_prefix' in kwargs:
                print "id_prefix: " + kwargs["id_prefix"]
                return cherrypy.request.rst.render(settings_overrides={'id_prefix': kwargs['id_prefix']})
            return cherrypy.request.rst.render()
        else:
            action = "view"
            import master
            cherrypy.request.template = template = master.master()
            cherrypy.request.githublink = self.githubroot

        template.action = action

        if cherrypy.request.resourceFileExt != ".rst":
            mimetype = mimetypes.guess_type(cherrypy.request.resourceFilePath)
            cherrypy.response.headers["Content-Type"] = mimetype[0] or "application/octet-stream"
            return open(cherrypy.request.resourceFilePath).read()
        elif os.path.isfile(cherrypy.request.resourceFilePath):
            template.rst = RstDocument(cherrypy.request.resourceFilePath)
            template.encoded_rst = cgi.escape(template.rst.document)
            template.title = template.rst.gettitle()
        else:

            get_params = urllib.quote(cherrypy.request.request_line.split()[1])
            creating = get_params.find("action%3Dcreate")
            print get_params

            if creating == -1:
                redir = get_params + "?action=create"
                raise cherrypy.HTTPRedirect(redir)
            else:
                raise cherrypy.HTTPError(404)

        return self.render()
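
Note: parsePath is defined elsewhere in this class; the handler above relies on it to populate cherrypy.request.resourceFilePath and cherrypy.request.resourceFileExt. A minimal sketch under that assumption (self.docroot is an invented attribute, purely for illustration):

    def parsePath(self, args):
        # Hypothetical sketch: map the request path segments onto a file under
        # the handler's document root and record the file extension.
        relpath = "/".join(args) if args else "index.rst"
        cherrypy.request.resourceFilePath = os.path.join(self.docroot, relpath)
        cherrypy.request.resourceFileExt = os.path.splitext(relpath)[1]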
""" Register node at server. Run before uploading data. Usage in README"""

import json
import upload

with open("config/node.json") as nodeConfigFile:
    nodeCfgs = json.load(nodeConfigFile)

nodeName = nodeCfgs["node"]["name"]
if nodeName == "default":
    print """Please update node name in node.json file. For more information,
    check README."""
else:
    data = [{"name": nodeName}]
    data.append("register")
    ack = upload.upload(json.dumps(data) + "\r\n\r\n")
    print ack
    if ack.startswith("SUCCESS"):
        node_id = int(ack.split()[-1])
        with open("config/node.json", "w") as nodeCfgFilew:
            nodeCfgs["node"]["node_id"] = node_id
            json.dump(nodeCfgs, nodeCfgFilew)
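
Note: the upload module imported above is not shown. A minimal sketch of its upload function, assuming it sends the '\r\n\r\n'-terminated JSON frame over TCP and returns the server's acknowledgement string (the host and port are placeholders, not from the original):

import socket

SERVER_ADDR = ("127.0.0.1", 9000)  # placeholder address, not from the original

def upload(payload):
    # Send one framed request and read the reply until the server closes.
    sock = socket.create_connection(SERVER_ADDR)
    try:
        sock.sendall(payload.encode("utf-8"))
        chunks = []
        while True:
            chunk = sock.recv(4096)
            if not chunk:
                break
            chunks.append(chunk)
        return b"".join(chunks).decode("utf-8")
    finally:
        sock.close()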
Exemple #47
0
def main(
    self, url, ie_key, subtitles, filename, filedesc,
    downloadkey, convertkey, username, oauth
):
    """Main worker code."""
    # Get a lock to prevent double-running with same task ID
    lockkey = 'tasklock:' + self.request.id
    if redisconnection.exists(lockkey):
        raise TaskError("Task has already been run")
    else:
        redisconnection.setex(lockkey, 'T', 7 * 24 * 3600)

    # Generate temporary directory for task
    for i in range(10):  # 10 tries
        task_id = os.urandom(8).encode('hex')
        outputdir = '/srv/v2c/output/' + task_id
        if not os.path.isdir(outputdir):
            os.mkdir(outputdir)
            break
    else:
        raise TaskError("Too many retries to generate a task id")

    s = Stats()

    def statuscallback(text, percent):
        if text is not None:
            s.text = text
        if percent is not None:
            s.percent = percent
        print '%d: %s' % (s.percent, s.text)

        self.update_state(
            state='PROGRESS',
            meta={'text': s.text, 'percent': s.percent}
        )

    def errorcallback(text):
        raise TaskError(text)

    try:
        statuscallback('Downloading...', -1)
        d = download.download(
            url, ie_key, downloadkey, subtitles,
            outputdir, statuscallback, errorcallback
        )
        if not d:
            errorcallback('Download failed!')
        file = d['target']
        if not file:
            errorcallback('Download failed!')
        subtitles = subtitles and d['subtitles']

        statuscallback('Converting...', -1)
        file = encode.encode(file, convertkey, statuscallback, errorcallback)
        if not file:
            errorcallback('Convert failed!')
        ext = file.split('.')[-1]

        statuscallback('Configuring Pywikibot...', -1)
        pywikibot.config.authenticate['commons.wikimedia.org'] = \
            (consumer_key, consumer_secret) + oauth
        pywikibot.config.usernames['commons']['commons'] = username
        pywikibot.Site('commons', 'commons', user=username).login()

        statuscallback('Uploading...', -1)
        filename += '.' + ext
        filename, wikifileurl = upload.upload(
            file, filename, url, http_host,
            filedesc, username, statuscallback, errorcallback
        )
        if not wikifileurl:
            errorcallback('Upload failed!')

        if subtitles:
            statuscallback('Uploading subtitles...', -1)
            try:
                subtitleuploader.subtitles(
                    subtitles, filename, username,
                    statuscallback, errorcallback
                )
            except Exception, e:
                statuscallback(type(e).__name__ + ": " + str(e), None)
                print e
                pass

    except pywikibot.Error:  # T124922 workaround
        exc_info = sys.exc_info()
        raise TaskError(
            (
                u'pywikibot.Error: %s: %s' % (
                    exc_info[0].__name__, exc_info[1]
                )
            ).encode('utf-8')), None, exc_info[2]
    else:
        statuscallback('Done!', 100)
        return filename, wikifileurl
    finally:
        statuscallback('Cleaning up...', -1)
        pywikibot.config.authenticate.clear()
        pywikibot.config.usernames['commons'].clear()
        pywikibot._sites.clear()

        shutil.rmtree(outputdir)
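
Note: TaskError and Stats come from elsewhere in this project. Minimal sketches consistent with how the worker uses them (assumptions, not the project's actual definitions):

class TaskError(Exception):
    """Raised to abort the task with a user-visible message."""

class Stats(object):
    """Holds the most recently reported progress text and percentage."""
    def __init__(self):
        self.text = ''
        self.percent = 0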
Exemple #48
0
#!/usr/bin/python
# coding:utf-8

import metaxml
import conf
import os
import sys
import upload
import lookup
from archive import Item
import archive
import getpass
import assembly

if __name__ == '__main__':
    item = Item(conf.iTMSTransporter, conf.distribute_account, conf.distribute_pwd,
                conf.bundle_short_version_string, conf.bundle_version, conf.project_path,
                conf.scheme, conf.configuration, conf.provisioning_profile_name,
                conf.vendor_id)

    # Start packaging (archive the build)
    archive.archive(item)

    # Fetch the itmsp package
    lookup.lookup(item)

    # Prepare the upload
    assembly.assembly(item)

    # Start the upload
    upload.upload(item)

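Note: the conf module above is project-specific configuration. A hypothetical sketch showing the fields this script reads (all values are illustrative placeholders):

# conf.py -- placeholder values, for illustration only
iTMSTransporter = "/path/to/iTMSTransporter"
distribute_account = "builder@example.com"
distribute_pwd = "app-specific-password"
bundle_short_version_string = "1.2.0"
bundle_version = "120"
project_path = "/path/to/MyApp.xcodeproj"
scheme = "MyApp"
configuration = "Release"
provisioning_profile_name = "MyApp AppStore"
vendor_id = "com.example.myapp"
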
        self.log.flush()
 
    def close(self):
        self.stdout.close()
        self.log.close()
sys.stdout = MyOutput("AM_Log.txt")
for adv in advertisers:
    print("Now processing %s" %adv)
    try:
        upload_rows = []
        dailyfiles = os.listdir(basepath + adv + '/Data/')
        for file in dailyfiles:
            reader = csv.reader(open(basepath + adv + '/Data/' + file,'rb'))
            date = file[-14:-4]
            for row in reader:
                if ('Advertiser' in row):
                    continue
                else:
                    new_row = [date]
                    for element in row:
                        if element=="n/a":
                            element=0
                        if element == "Unknown Advertiser ":
                            element = adv
                        new_row.append(element)
                upload_rows.append(new_row)
        if upload_rows:
            upload(upload_rows,"advertiser_daily_ad_summary")
    except Exception:
        traceback.print_exc()
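
Note: the upload helper called above is imported elsewhere in this script. A sketch assuming it bulk-inserts the collected rows into the named table (sqlite3 is used purely for illustration; the real helper may target a different database):

import sqlite3

def upload(rows, table):
    # Hypothetical bulk insert; table names are trusted internal constants here.
    if not rows:
        return
    conn = sqlite3.connect("adconnect.db")  # placeholder database file
    placeholders = ",".join("?" * len(rows[0]))
    conn.executemany("INSERT INTO %s VALUES (%s)" % (table, placeholders), rows)
    conn.commit()
    conn.close()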
    parser = argparse.ArgumentParser(description='Testdroid test script')
    parser.add_argument('-l', '--local', action='store_true',
                        help='Test on local device instead of Testdroid cloud')
    parser.add_argument('-n', '--no-upload', action='store_true',
                        help="Don't upload the APK to Testdroid before running tests (uses the latest uploaded build)")
    parser.add_argument('-g', '--group', type=int,
                        help='The device group to run tests on. If neither group nor device is supplied, any free device is picked')
    parser.add_argument('-d', '--device', type=str,
                        help='The specific device to run tests on. If neither group nor device is supplied, any free device is picked')
    parser.add_argument('apk', type=str, nargs='?', help='The path of APK file')

    args = parser.parse_args()
    apk_path = args.apk
    if apk_path is None:
        apk_path = DEFAULT_APK_PATH
    if args.local:
        local_android.test()
    else:
        testdroid_api_key = os.environ.get('TESTDROID_APIKEY')
        if testdroid_api_key is None:
            print "TESTDROID_APIKEY environment variable is not set!"
            sys.exit(1)
        cloud_path = "latest"
        if args.no_upload is not True:
            cloud_path = upload(testdroid_api_key, apk_path)

        testdroid_android.executeTests(testdroid_api_key,
                                       cloud_path,
                                       args.group,
                                       args.device)
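
Note: assuming this fragment lives in a script (called run_tests.py here purely for illustration), typical invocations given the flags defined above would be:

# Upload a fresh APK and run on a specific cloud device:
#   TESTDROID_APIKEY=... python run_tests.py -d "Nexus 5" path/to/app.apk
# Re-run against the latest uploaded build on device group 12:
#   TESTDROID_APIKEY=... python run_tests.py -n -g 12
# Run on a locally attached device instead of the cloud:
#   python run_tests.py -l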
Exemple #51
0
                            sensorname), logging.ERROR, logging.error)
            global logs
            loggingData = {"value_name": "log", "value": str(logs)}
            logs = []
            print ""
            print "Time: " + str(datetime.datetime.now())
            data.append("upload")
            upload.upload(json.dumps(data) + '\r\n\r\n')
            time.sleep(1)
if __name__ == '__main__':
    # Ensures the main process doesn't terminate because of exceptions
    # (whether expected or unexpected). Report the error message and let the
    # server handle exceptions.
    import traceback
    import sys
    try:
        nodeRun()
    except Exception, e:
        exc_type, exc_value, exc_tb = sys.exc_info()
        tb = traceback.format_exception(exc_type, exc_value, exc_tb)
        logging.critical(tb)
        with open('config/node.json') as nodeConfigFile:
            nodeCfgs = json.load(nodeConfigFile)['node']
            nodeName = nodeCfgs['name']
            nodeID = nodeCfgs['node_id']
        data = [{"name": nodeName, "node_id" : nodeID}]
        data.append(tb)
        data.append("exception")
        upload.upload(json.dumps(data) + '\r\n\r\n')
        nodeRun()
Exemple #52
0
	auth = lg_authority.AuthRoot()
	auth__doc = "The object that serves authentication pages"


	@cherrypy.expose
	def index(self):
		output = ""

		output += getIndexContent()

		output = getPage(output, '')

		return output 

if __name__ == '__main__':

	#cherrypy.config.update({'server.socket_port':index_port})
	cherrypy.config.update(cherry_settings)
	
	index = index()
	index.upload = upload.upload()
	index.manage = manage.manage()
	index.modify = modify.modify()
	index.download = download.download()
	index.learn = learn.learn()
	index.support = support.support()
	index.visualize = visualize.visualize()
	#index.dashboard = dashboard.dashboard()
	cherrypy.quickstart(index)

    def do_upload(self, fn, cs):
        """Upload to one or both accounts from local."""
        upload(fn, cs)
Exemple #54
0
def main():
  log = logging.getLogger('stderr')
  elog = logging.getLogger('view') # error logger
  
  # the contents of the doc-ids file are saved here,
  # if it's given with --docs
  allhashes = ''
  squery = None
  
  # load defaults
  muttrc = config.muttrc
  boxpath = config.temp_mailbox
  sentpath = config.sent_mailbox
  changedhashesfile = None
  doupload = False
  verbose = False
  uploadsent = False
  
  # load command line args
  i = 1
  while i < len(sys.argv):
    arg = sys.argv[i]
    if arg == '--docs':
      i += 1
      allhashes = open(sys.argv[i], 'r').read()
    elif arg == '--verbose':
      verbose = True
    elif arg == '--tmp':
      i += 1
      boxpath = sys.argv[i]
    elif arg == '--muttrc':
      i += 1
      muttrc = sys.argv[i]
    elif arg == '--changed':
      i += 1
      changedhashesfile = sys.argv[i]
    elif arg == '--upload':
      doupload = True
    elif arg == '--sent':
      uploadsent = True
    elif arg == '--search':
      i += 1
      squery = sys.argv[i]
    else:
      common.fatal("Unknown arg %s"%arg)
    i += 1
  
  if squery and allhashes:
    common.fatal("Arguments --docs and --search are exclusive!")
  if not squery and not allhashes:
    common.fatal("No documents given. Try --docs FILE or --search QUERY .")
  
  # open temporary mailbox
  if boxpath is None:
    common.fatal("No temporary mailbox given.")
  boxpath = os.path.expanduser(boxpath)
  log.debug("Using temporary mailbox: %s"%boxpath)
  # try to delete old temporary mailbox
  try: os.remove(boxpath)
  except OSError: pass
  # open
  box = mailbox.mbox(boxpath)
  
  if muttrc: muttrc = '-F '+muttrc
  else: muttrc = ''
  
  if allhashes:
    ids = []
    # read hashes
    re_id = re.compile(r'([0-9A-Fa-f]+)\s+')
    for count, line in enumerate(allhashes.splitlines(True)):
      mo = re_id.match(line)
      if mo is None:
        log.info("Ignoring line %d: %s" % (count+1, line))
        continue
      docid = mo.group(1)
      ids.append(docid)
  if squery:
    try:
      ids = search.search(squery, 'stderr')
    except lrparsing.ParseError:
      common.fatal("Could not parse query:\n%s\n%s"%(squery, traceback.format_exc()))
    except IOError:
      common.fatal(traceback.format_exc())
  if len(ids) == 0:
    common.fatal("No documents found.")
  if len(ids) > 100:
    sys.stdout.write("Download %d mails? (y/n): "%len(ids))
    resp = raw_input()
    if resp.lower() != 'y' and resp.lower() != 'yes':
      common.fatal("OK. Exit.")
  
  # download docs
  log.info("Downloading %d mails."%len(ids))
  for doc in ids:
    try: download.download(doc, box=box, logger='stderr')
    except IOError as e:
      common.fatal("Couldnt download mail %s\n  %s" % (docid, traceback.format_exc(e)))
  
  if len(ids) != len(box):
    common.fatal("Something strange happened. Not enough mails in mailbox!")
  
  hashes_before = hash_mails(box)
  box.close()
  
  # open mutt
  cmd = "mutt %s -f %s" % (muttrc, boxpath)
  log.info(cmd)
  retval = subprocess.call(cmd, shell=True)
  log.info("Mutt returned with status %d."%retval)
  if retval:
    common.fatal("Mutt error %d. EXIT. No changes to DB"%retval)
  
  box = mailbox.mbox(boxpath)
  # detect changes in mbox
  hashes_after = hash_mails(box)
  
  if len(hashes_before) != len(hashes_after) or len(hashes_before) != len(ids):
    common.fatal("Some mails were deleted (or added). Aborting. No changes made to DB.")
  
  # filter differing hashes
  changed = filter(lambda pair: pair[1] != pair[2], zip(ids, hashes_before, hashes_after))
  # get (mbox key, docid) only
  changed = map(lambda pair: (pair[1][0], pair[0]), changed)
  log.info("Raw data of %d mails changed."%len(changed))
  # changed is now a list of tuples of (mbox key, docid)
  
  if verbose: uploadlogger = 'stderr'
  else: uploadlogger = 'none'
  
  olddocs = []
  changeddocs = []
  metachanged = []
  metachangeddocs = []
  # check real changes in metadata
  if changedhashesfile or doupload:
    log.info("Checking for meta changes ...")
    # parse changed mails
    for key, docid in changed:
      if not changedhashesfile: sys.stdout.write('.')
      try:
        changeddocs.append(upload.parsemail(box.get_string(key), logger=uploadlogger))
      except:
        elog.error("Exception while parsing mail:\n %s" % traceback.format_exc())
        upload.save_mail(docid, box.get_string(key))
        logging.shutdown()
        sys.exit(1)
      if not changedhashesfile: sys.stdout.flush()
    if not changedhashesfile: sys.stdout.write('\n')
    # download old docs
    for _, docid in changed:
      try: olddocs.append(common.get_doc(docid, "Could not get doc.", uploadlogger))
      except IOError: common.fatal(traceback.format_exc())
    # compare docs
    for chan, changd, oldd in zip(changed, changeddocs, olddocs):
      if not common.eq_mail_meta(changd, oldd):
        metachanged.append(chan)
        metachangeddocs.append(changd)
    log.info("Metadata of %d mails changed."%len(metachanged))
    changed = metachanged
  
  # write changed mails file
  if changedhashesfile:
    f = open(changedhashesfile, 'w+')
    for key, docid in changed:
      f.write(docid)
      f.write('\n')
    f.close()
  
  # upload changed mails
  if doupload:
    # TODO ask for upload?
    log.info("Uploading %d mails"%len(changed))
    for (key, docid), mdata in zip(changed, metachangeddocs):
      if not changedhashesfile: sys.stdout.write('.')
      try:
        #FIXME
        upload.upload(docid, mdata, override=True, preserveread=False, logger=uploadlogger)
      except:
        elog.error("Exception while parsing or uploading mail:\n %s" % traceback.format_exc())
        upload.save_mail(docid, box.get_string(key))
        logging.shutdown()
        sys.exit(1)
      if not changedhashesfile: sys.stdout.flush()
    if not changedhashesfile: sys.stdout.write('\n')
  box.close()
  
  # upload sent mails
  if uploadsent and sentpath:
    # open mailbox
    sentpath = os.path.expanduser(sentpath)
    log.debug("Opening sent mailbox: %s"%sentpath)
    try:
      box = mailbox.mbox(sentpath, create=False)
    except mailbox.NoSuchMailboxError as e:
      common.fatal("Given mailbox for sent mails does not exist: %s"%sentpath)
    log.info("Uploading %d mails in sent mbox %s"%(box.__len__(), sentpath))
    # upload
    for key in box.iterkeys():
      try:
        mail = box.get_string(key)
        doc_id = upload.hash_mail(mail)  # compute the id up front so the except block can reference it
        mdata = upload.parsemail(mail, logger=uploadlogger)
        upload.upload(doc_id, mdata, mail, logger=uploadlogger)
      except:
        elog.error("Exception while parsing or uploading mail:\n %s" % traceback.format_exc())
        upload.save_mail(doc_id, box.get_string(key))
        continue
    box.close()
    # truncate file
    log.debug("Truncating sent mbox: %s"%sentpath)
    open(sentpath, 'w').close()
  
  logging.shutdown()
  sys.exit(0)
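
Note: hash_mails is not shown above; from its uses (pair[1][0] is treated as an mbox key), it presumably returns one (mbox key, digest) pair per message. A minimal sketch under that assumption:

import hashlib

def hash_mails(box):
    # Hypothetical: one (key, digest) pair per message, in key order.
    return [(key, hashlib.sha1(box.get_string(key)).hexdigest())
            for key in box.iterkeys()]
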
from upload import upload
import csv
import os
import traceback
basepath = 'C:/cygwin64/home/Damir/ProjectRescue/AdConnect/Publishers/'

publishers = os.listdir(basepath)
#writer = csv.writer(open('C:/cygwin64/home/Damir/Publisher_Source_Daily.csv','wb'))
#header = 0

for pub in publishers:
    upload_rows = []
    try:
        sources = os.listdir(basepath + pub + '/Sources/')
        for source in sources:
            dailyfiles = os.listdir(basepath + pub + '/Sources/' + source)
            for file in dailyfiles:
                if "Daily" in file:
                    reader = csv.reader(open(basepath + pub + '/Sources/' + source + '/' + file,'rb'))
                    for row in reader:
                        if 'Date' in row:
                            continue
                        new_row = [pub, source]
                        for element in row:
                            new_row.append(element)
                        upload_rows.append(new_row)
    except Exception:
        print("ERROR")
        traceback.print_exc()
    # upload rows only if any were collected
    if upload_rows:
        upload(upload_rows, "publisher_source_daily_summary")
import android
import time
from upload import upload

DOC= "take a picture, saves to /sdcard/rootPictures and uploads to webserver."         


droid = android.Android()
cur_time = str(time.time())  # wall-clock timestamp; time.clock() measured CPU time, which is useless for filenames
path = '/sdcard/rootPictures/%s.jpg' % cur_time

droid.cameraCapturePicture(path)

upload(path, '/AndroidUploads')
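
Note: the upload helper imported above takes a local path and a remote directory. A hypothetical FTP-based sketch (the host and credentials are placeholders, not from the original):

import ftplib
import os

def upload(path, remote_dir):
    # Hypothetical: push one file into remote_dir on a preconfigured FTP host.
    ftp = ftplib.FTP("ftp.example.com", "user", "password")  # placeholders
    ftp.cwd(remote_dir)
    f = open(path, "rb")
    try:
        ftp.storbinary("STOR " + os.path.basename(path), f)
    finally:
        f.close()
    ftp.quit()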