Example No. 1
def encrypt_public(key):
    # open the file containing the ID of the DC
    name_of_dc = open('data/temporary_store.txt').read().replace('\n', '')
    encrypted_data = asymcrypt.encrypt_data(
        key, "data/" + name_of_dc + ".pem")  # encrypt with the public key
    hex_str = encrypted_data.hex()
    publish(name_of_dc, hex_str)  #publish the encrypted key to EDC
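Note that publish() itself is never shown in these snippets. Throughout the IoT examples it behaves like a thin MQTT helper taking a topic and a payload; a minimal sketch of such a helper using paho-mqtt, where the broker hostname is purely an assumption:

import paho.mqtt.publish as mqtt

# Hypothetical helper, assuming an MQTT broker on localhost; the real
# projects may publish over a different transport entirely.
def publish(topic, payload):
    mqtt.single(topic, payload, hostname="localhost")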
def bk(orderstr: str, sizes: Iterable[int], func_indices: List[int],
       samples: int) -> None:
    order = to_int(orderstr)
    stats_per_func_by_size = {}
    for size in sizes:
        begin = time.process_time()
        g, clique_count = read_random_graph(orderstr=orderstr, size=size)
        secs = time.process_time() - begin
        name = f"random of order {orderstr}, size {size}, {clique_count} cliques:"
        if order < 10:
            print(f"{name} {g.adjacencies}")
        else:
            print(f"{name} (creating took {secs:.3f}s)")
        stats = bron_kerbosch_timed(g,
                                    clique_count=clique_count,
                                    func_indices=func_indices,
                                    samples=samples)
        for func_index, func_name in enumerate(FUNC_NAMES):
            mean = stats[func_index].mean()
            if not isnan(mean):
                reldev = stats[func_index].deviation() / mean
                print(f"  {func_name:<8}: {mean:6.3f}s ± {reldev:.0%}")
        stats_per_func_by_size[size] = stats
    if len(stats_per_func_by_size) > 1:
        publish(language="python3",
                orderstr=orderstr,
                case_names=FUNC_NAMES,
                stats_per_func_by_size=stats_per_func_by_size)
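A hypothetical invocation of bk(), assuming to_int() accepts plain decimal strings and FUNC_NAMES is the module-level list of Bron-Kerbosch variant names; the sizes and sample count are illustrative:

# Illustrative only: time every variant on two random graphs of order 10,000.
bk(orderstr="10000",
   sizes=[100_000, 200_000],
   func_indices=list(range(len(FUNC_NAMES))),
   samples=5)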
Example No. 3
def sym_key():
    symmetricKey_KIS = b'aQOQxINtlrXU_HkbJywoMxfiFMXC-OToihHK2ApIeCs='
    KIS = Fernet(symmetricKey_KIS)
    KIE = Fernet.generate_key()  # generate symmetric key
    KIE_IoTD = KIS.encrypt(KIE)
    #print(KIE_IoTD)
    encrypt_public(KIE)
    publish("sensor_sym_key", KIE_IoTD)  # publish symmetric key to IoTD
Example No. 4
    def test_publish_update(self):
        """
        Publish an article update
        """
        with open(os.path.join(here, 'sample.rst')) as f:
            publish(f, draft=True, login_required=True)
        updated_article = Article.objects.get(pk=1)
        self.assertFalse(self.article.login_required)
        self.assertTrue(updated_article.login_required)
def abort_process(original_combination, temp_data, start):
    publish("sensor_sym_key", "abort")
    encrypt_public("nodata")
    benchmark()
    abort_reason("error", " \n")
    spoof_detect(original_combination, temp_data).to_csv(
        "/logs/error.log", mode="a",
        index=False)  # save the error details with time
    end = time.time()
    print("Time taken for the analysis after receving the data ", end - start)
    clearFiles()
Example No. 6
def build(do_publish=0):
    print("Build")
    print("-----")

    outfile = 'hardware.json'
    oldfile = 'backup.json'

    print("Backup current json...")
    oldjso = None
    if os.path.isfile(outfile) and not os.path.isfile(oldfile):
        os.rename(outfile, oldfile)

    errorlevel = 0

    errorlevel += parse_machines()

    if errorlevel == 0:
        errorlevel += vitamins()
    if errorlevel == 0:
        errorlevel += cut()
    if errorlevel == 0:
        errorlevel += printed()
    if errorlevel == 0:
        errorlevel += assemblies()
    if errorlevel == 0:
        errorlevel += machines()

    if errorlevel == 0:
        errorlevel += guides()

    catalogue()

    if errorlevel == 0 and do_publish > 0:
        publish()

    # if everything is ok then delete backup - no longer required
    if errorlevel == 0:
        os.remove(oldfile)

    try:
        if sys.platform == "darwin":
            check_output([
                'osascript', '-e',
                'display notification "Build Complete" with title "Build Process"'
            ])
    except Exception:
        print("Exception running osascript")

    print("")
    print("==============")
    print("Build Complete")

    return errorlevel
Example No. 7
def build(do_publish=0):
    print("Build")
    print("-----")

    outfile = 'hardware.json'
    oldfile = 'backup.json'

    print("Backup current json...")
    oldjso = None
    if os.path.isfile(outfile) and not os.path.isfile(oldfile):
        os.rename(outfile, oldfile)

    errorlevel = 0

    errorlevel += parse_machines()

    if errorlevel == 0:
        errorlevel += vitamins()
    if errorlevel == 0:
        errorlevel += cut()
    if errorlevel == 0:
        errorlevel += printed()
    if errorlevel == 0:
        errorlevel += assemblies()
    if errorlevel == 0:
        errorlevel += machines()

    if errorlevel == 0:
        errorlevel += guides()

    catalogue()

    if errorlevel == 0 and do_publish > 0:
        publish()


    # if everything is ok then delete backup - no longer required
    if errorlevel == 0:
        os.remove(oldfile)

    try:
        if sys.platform == "darwin":
            check_output(['osascript','-e','display notification "Build Complete" with title "Build Process"'])
    except Exception:
        print("Exception running osascript")

    print("")
    print("==============")
    print("Build Complete")

    return errorlevel
Example No. 8
def publish_reports(request):
    if request.method == 'POST':
        form = ReportPublishForm(request.POST)
        if form.is_valid():     
            translation.activate(form.cleaned_data['language'])
            request.LANGUAGE_CODE = translation.get_language()
            publish(form.cleaned_data['start_date'], form.cleaned_data['end_date'], form.cleaned_data['report'], form.cleaned_data['edition'])
            translation.deactivate()
            
        return HttpResponseRedirect('/admin/report/publish') # Redirect after POST
    
    return render_to_response('report_publish.html',
        {'form': ReportPublishForm(), },
        context_instance=RequestContext(request))
Example No. 9
def waitInput(key):
    # Open the configuration file and put content in config variable
    with open('config.json', 'r') as content_file:
        config = content_file.read()

    config = json.loads(config)
    infile_path = config[key]

    #long int, long int, unsigned short, unsigned short, unsigned int
    FORMAT = 'llHHI'
    EVENT_SIZE = struct.calcsize(FORMAT)

    #open file in binary mode
    in_file = open(infile_path, "rb")

    event  = in_file.read(EVENT_SIZE)
    event2 = in_file.read(EVENT_SIZE)
    codigo = ""

    while event:
        (tv_sec, tv_usec, type, code, value) = struct.unpack(FORMAT, event)
        (tv_sec, tv_usec, type, code, value) = struct.unpack(FORMAT, event2)

        if type != 0 or code != 0 or value != 0:
            if type==4 and code==4 and str(value) in config[infile_path]:

                tvalue = config[infile_path][str(value)]

                if tvalue == "/":
                    if len(codigo) != 0:
                        if (codigo.find(tvalue) == -1) and (codigo.find(".") == -1):
                            codigo = codigo + tvalue
                elif tvalue == ".":
                    if len(codigo) != 0:
                        if (codigo.find(tvalue) == -1) and (codigo.find("/") == -1):
                            codigo = codigo + tvalue
                elif tvalue=="ENTER":
                    if len(codigo) >= 1:
                        publish(codigo, key)
                    codigo = ""
                else:
                    codigo = codigo +  tvalue


        event = in_file.read(EVENT_SIZE)
        event2 = in_file.read(EVENT_SIZE)

    in_file.close()
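The config.json read above is not shown in the source. From the way it is indexed (config[key] yields a device path, config[infile_path][str(value)] yields a character), it plausibly looks like the following; every name and scancode here is illustrative:

{
    "barcode_reader": "/dev/input/event0",
    "/dev/input/event0": {
        "2": "1",
        "3": "2",
        "53": "/",
        "52": ".",
        "28": "ENTER"
    }
}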
Example No. 10
def check_annotations():
    while True:
        known_lampposts = annotation.get_lampposts()
        print '--------------------------------------- monitoring {} lampposts ---------------------------------------'.format(
            len(known_lampposts))
        for fid in known_lampposts:
            # print "> '{}':".format(fid)
            f_uri = r.get('f:uri:{}'.format(fid))
            agreements = stats.check_agreement(fid)
            agreed_color = None
            agreed_wattage = None
            agreed_cover = None
            position = annotation.get_lamppost_position(fid)
            position_dict = {}
            if position is not None:
                position_dict['latitude'] = position[0]
                position_dict['longitude'] = position[1]
            for agreement in agreements:
                agreement['id'] = fid
                attribute = agreement['attribute']
                if attribute == 'color':
                    agreed_color = agreement['value']
                if attribute == 'wattage':
                    agreed_wattage = agreement['value']
                if attribute == 'covered':
                    agreed_cover = agreement['value']
                value = get_current_aggreement(fid, attribute)
                if value != agreement['value']:
                    if f_uri is not None:
                        agreement['uri'] = f_uri
                    publish(dict(agreement, **position_dict))
                    set_current_aggreement(fid, attribute, agreement['value'])
                    annotation.delete_temporal(fid)
            try:
                if agreed_color and agreed_cover and agreed_wattage:
                    pollution = calculate_pollution(agreed_color, agreed_wattage, agreed_cover)
                    current_pollution = get_current_aggreement(fid, 'pollution')
                    if current_pollution is None or current_pollution != pollution:
                        set_current_aggreement(fid, 'pollution', pollution)
                        data = {'id': fid, 'attribute': 'pollution', 'value': str(pollution)}
                        if f_uri is not None:
                            data['uri'] = f_uri
                        publish(dict(data, **position_dict))
            except Exception as e:
                print e.message

        sleep(3)
Example No. 11
def quickpost(count):
    rows = wantudal.get_available_rows(count)
    for row in rows:
        postid = publish(row.savedpath, row.description, row.description)
        if postid:
            wantudal.set_published(row.url)
            print '[row_id:%d] is published as [post_id:%s]' % (row.id, postid)
        else:
            print '[row_id:%d] is failed to be published' % (row.id)
def verify_request():
    print("Received a new request")
    df1 = pd.read_csv("data/register_dc.csv",
                      delimiter=",",
                      names=["topic",
                             "time"])  # file having details of registered DC
    df1["time"] = pd.to_datetime(
        df1["time"],
        infer_datetime_format=True)  # convert to datetime format
    df2 = pd.read_csv("data/data_request.csv",
                      delimiter=",",
                      names=["topic", "time"])  # file with new request
    df2["time"] = pd.to_datetime(
        df2["time"],
        infer_datetime_format=True)  # convert to datetime format
    l = len(df1)  # number of registered DCs (there may be more than one)
    for i in range(0, l):
        if (df1.topic[i] == df2.topic[0]):
            if (df1.time[i] < df2.time[0]):
                df1.time[i] = datetime.datetime.now(
                )  # updating the time to the request time
                print(df1.time[i])  # print the request time
                publish("sensor_data_req",
                        "usbdata")  # requesting data from IoTD
                df1.to_csv("data/register_dc.csv", index=False, header=False)
                #print("DC name im request ",df2.topic[0])
                with open('data/temporary_store.txt', 'a+') as f:
                    f.write(
                        str(df2.topic[0])
                    )  # store the EDC name for later use in decider_proper.py
                    #print("file written")
                df2 = df2.drop(df2.index[[0]])  #removing the request details
                df2.to_csv("data/data_request.csv",
                           mode="w",
                           index=False,
                           header=False)  #update the file
            else:
                print("old request")  # Discard the old request
        else:
            pass
def build(do_publish=0):
    print("Build")
    print("-----")

    outfile = 'hardware.json'
    oldfile = 'backup.json'

    print("Backup current json...")
    oldjso = None
    if os.path.isfile(outfile) and not os.path.isfile(oldfile):
        os.rename(outfile, oldfile)

    errorlevel = 0

    errorlevel += parse_machines()

    if errorlevel == 0:
        errorlevel += vitamins()
    if errorlevel == 0:
        errorlevel += cut()
    if errorlevel == 0:
        errorlevel += printed()
    if errorlevel == 0:
        errorlevel += assemblies()
    if errorlevel == 0:
        errorlevel += machines()

    if errorlevel == 0:
        errorlevel += guides()

    catalogue()

    if errorlevel == 0 and do_publish > 0:
        publish()


    # if everything is ok then delete backup - no longer required
    if errorlevel == 0:
        os.remove(oldfile)

    return errorlevel
def benchmark():
    name_of_dc = open('data/temporary_store.txt').read().replace('\n', '')
    #print("name of DC is",name_of_dc)
    df1 = pd.read_csv("data/register_dc.csv",
                      delimiter=",",
                      names=["topic", "time"])
    df1["time"] = pd.to_datetime(
        df1["time"],
        infer_datetime_format=True)  # convert the object to datetime format
    #print(df1)
    #print("name of DC in bench",name_of_dc)
    #print(type(name_of_dc))
    a = (df1.loc[df1.topic == name_of_dc, "time"].iloc[0])
    #print("a  ",a)
    c = (datetime.datetime.now() - a)
    print("Time taken benchmark ", c.total_seconds())
    publish("sc_time", str(c.total_seconds()))
    print("*************************************************")
    f = open("data/temporary_store.txt", "w")
    f.truncate()
    f.close()
def build(do_publish=0):
    print("Build")
    print("-----")

    outfile = 'hardware.json'
    oldfile = 'backup.json'

    print("Backup current json...")
    oldjso = None
    if os.path.isfile(outfile) and not os.path.isfile(oldfile):
        os.rename(outfile, oldfile)

    errorlevel = 0

    errorlevel += parse_machines()

    if errorlevel == 0:
        errorlevel += vitamins()
    if errorlevel == 0:
        errorlevel += cut()
    if errorlevel == 0:
        errorlevel += printed()
    if errorlevel == 0:
        errorlevel += assemblies()
    if errorlevel == 0:
        errorlevel += machines()

    if errorlevel == 0:
        errorlevel += guides()

    catalogue()

    if errorlevel == 0 and do_publish > 0:
        publish()

    # if everything is ok then delete backup - no longer required
    if errorlevel == 0:
        os.remove(oldfile)

    return errorlevel
Example No. 16
def publish_apk():
    content = request.json
    print(content)
    folder = str("/tmp/pwa_apk" + str(int(time.time() * 1000)))
    os.system("mkdir " + folder)
    file = open(folder + '/pwa.apk', 'wb')
    file.write(
        base64.b64decode(content['apk'].replace(
            'data:application/vnd.android.package-archive;base64,', '')))
    file.close()
    outcome = publish.publish(content['client_id'], content['client_key'],
                              content['app_id'], folder + "/pwa.apk")
    return outcome
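A hypothetical client for this endpoint; the URL, credentials, and file name are placeholders, and only the JSON field names are taken from the handler above:

import base64
import requests

# Hypothetical client: POST a base64-encoded APK with the fields the
# handler reads (client_id, client_key, app_id, apk).
with open("app.apk", "rb") as f:
    apk_b64 = ("data:application/vnd.android.package-archive;base64," +
               base64.b64encode(f.read()).decode("ascii"))
requests.post("http://localhost:5000/publish_apk", json={
    "client_id": "CLIENT_ID",
    "client_key": "CLIENT_KEY",
    "app_id": "APP_ID",
    "apk": apk_b64,
})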
def gcs_to_bq(data, context):
    """Background Cloud Function to be triggered by Cloud Storage.
       This function loads a CSV file imported to Cloud Storage to Google BigQuery.
       Load job can be configured to import different CSVs to different BQ tables.
       Parameters are taken from config.json file and schema directory

    Args:
        data (dict): The Cloud Functions event payload.
        context (google.cloud.functions.Context): Metadata of triggering event.
    Returns:
        None; the output is written to Stackdriver Logging
    """

    project_id = 'PROJECT_ID'  # Add project ID
    success_topic_name = 'bq-upload-success'  # Add pub sub topic name
    error_topic_name = 'bq-upload-error'  # Add pub sub topic name

    # Success message variables
    file_name = data['name']
    upload_bucket_name = data['bucket']
    config_file_path = 'config.json'
    with open(config_file_path) as config_file:
        config_data = json.load(config_file)
    for config in config_data:
        if config['file_name'] == file_name:
            table_name = config['table_name']
            dataset_name = config['dataset_name']

    #Load and post result
    try:
        ltbq().load_to_bq(data, context)
        data_package = u'File gs://{}/{} successfully loaded to {}.{}'.format(
            upload_bucket_name, file_name, dataset_name, table_name)
        data_package = data_package.encode('utf-8')  # publisher expects bytes
        publish().publisher(project_id, success_topic_name, data_package)
    except Exception as err:
        error_package = str(err).encode('utf-8')
        publish().publisher(project_id, error_topic_name, error_package)
def gcs_to_bq(data, context):
    """Background Cloud Function to be triggered by Cloud Storage.
       This function loads a CSV file imported to Cloud Storage to Google BigQuery.
       Load job can be configured to import different CSVs to different BQ tables.
       Parameters are taken from config.json file and schema directory

    Args:
        data (dict): The Cloud Functions event payload.
        context (google.cloud.functions.Context): Metadata of triggering event.
    Returns:
        None; the output is written to Stackdriver Logging
    """

    project_id = 'PROJECT_ID' # Add project ID
    success_topic_name = 'bq-upload-success' # Add pub sub topic name
    error_topic_name = 'bq-upload-error' # Add pub sub topic name

    # Success message variables
    file_name = data['name']
    upload_bucket_name = data['bucket']
    config_file_path = 'config.json'
    with open(config_file_path) as config_file:
        config_data = json.load(config_file)
    for config in config_data:
        if config['file_name'] == file_name:
            table_name = config['table_name']
            dataset_name = config['dataset_name']

    #Load and post result
    try:
        ltbq().load_to_bq(data, context)
        data_package = u'File gs://{}/{} successfully loaded to {}.{}'.format(upload_bucket_name, file_name, dataset_name, table_name)
        data_package = data_package.encode('utf-8')  # publisher expects bytes
        publish().publisher(project_id, success_topic_name, data_package)
    except Exception as err:
        error_package = str(err).encode('utf-8')
        publish().publisher(project_id, error_topic_name, error_package)
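The publish().publisher helper used in both versions is not shown. A minimal sketch of what it plausibly wraps, using the google-cloud-pubsub client library (the class shape is an assumption; only the pubsub_v1 calls are the library's real API):

from google.cloud import pubsub_v1

class publish:
    # Hypothetical stand-in for the imported helper class.
    def publisher(self, project_id, topic_name, data):
        client = pubsub_v1.PublisherClient()
        topic_path = client.topic_path(project_id, topic_name)
        if isinstance(data, str):
            data = data.encode('utf-8')  # Pub/Sub payloads must be bytes
        client.publish(topic_path, data).result()  # block until delivered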
Example No. 19
    def do_GET(self):
        try:
            parseRes = urllib.parse.urlparse(self.path)
            if parseRes.path == '/publish':
                if publish() != 0:
                    raise Exception('publish error!')
        except Exception:
            self.send_response(500)
            self.send_header("Content-Type", "text/html; charset=UTF-8")
            self.end_headers()
            self.wfile.write(bytes("internal error!", "utf-8"))
        else:
            self.send_response(200)
            self.send_header("Content-Type", "text/html; charset=UTF-8")
            self.end_headers()
            self.wfile.write(bytes('done', "utf-8"))
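This do_GET method needs a handler class and server around it; a minimal, assumed wiring (class name and port are placeholders):

import urllib.parse
from http.server import BaseHTTPRequestHandler, HTTPServer

class PublishHandler(BaseHTTPRequestHandler):
    pass  # the do_GET method above would be defined here

if __name__ == '__main__':
    HTTPServer(('', 8080), PublishHandler).serve_forever()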
Example No. 20
    def test_publish_new(self):
        """
        Publish a new article
        """
        # Make a different title
        title = 'Here is another article'
        pubstr = '2020-09-01 12:00'
        pubdate = filter_field('publish', { 'publish': pubstr })
        contents = open(os.path.join(here, 'sample.rst')).read().replace(
            'This is my title', title)
        f = StringIO(contents)

        a = publish(f, draft=True, publish=pubstr, debug=True)
        self.assertNotEqual(a.pk, self.article.pk)
        self.assertEqual(a.title, title)
        self.assertEqual(a.publish_date, pubdate)
        self.assertTrue('Some content' in a.content)
        followups = a.followup_for.all()
        self.assertTrue(self.article.pk in [a.pk for a in followups], followups)
        related = a.related_articles.all()
        # We have to use titles due to ambiguous slug
        self.assertTrue(self.article.title in [b.title for b in related], related)
Example No. 21
def write_output(entries):
    # Write HTML output
    logger.debug("Publishing HTML 5 file...")
    publish.publish(
        config.HTML5_TEMPLATE, 
        config.HTML5_OUTPUT_FILE, 
        entries[:config.NUM_ENTRIES]
    )
    # Write Atom output
    logger.debug("Publishing Atom file...")
    publish.publish(
        config.ATOM_TEMPLATE, 
        config.ATOM_OUTPUT_FILE, 
        entries[:config.NUM_ENTRIES], 
        opt_template_values={
            'feed_title': config.ATOM_FEED_TITLE, 
            'feed_subtitle': config.ATOM_FEED_SUBTITLE, 
            'feed_url': config.ATOM_FEED_URL
        }
    )
    # Write a link-only feed
    links = []
    for entry in entries:
        if entry.type == 'link':
            links.append(entry)
    logger.debug("Publishing Atom links file...")
    publish.publish(
        config.ATOM_TEMPLATE,
        config.ATOM_LINKS_OUTPUT_FILE,
        links[:config.NUM_ENTRIES], 
        opt_template_values={
            'feed_title': config.ATOM_LINKS_FEED_TITLE, 
            'feed_subtitle': config.ATOM_LINKS_FEED_SUBTITLE, 
            'feed_url': config.ATOM_LINKS_FEED_URL
        }
    )
        'Sr20R21_a0_p0_U20_z10_tr.hdf5',
        'Sr20R21_a0_p0_U20_z05_tr.hdf5',
        'Sr20R21_a0_p0_U20_z00_tr.hdf5',
    ]

    do_the_streamwise_coherence_analysis( 
        hdf_list_to_process,
        overwrite = True
    )

    #do_the_vertical_coherence_analysis(
    #    hdf_list_to_process,
    #    plot_individual = False,
    #    overwrite = True
    #    )



from os.path import join
import publish
root = join('/home/carlos/Documents/PhD/Articles/Article_2',
            'Article2_Scripts/time_resolved_scripts/LineReservedData/')


#get_relevant_wall_normal_data_from_pandas_hdf(exceptions = ['STE'])
#get_relevant_wall_normal_data_from_pandas_hdf()
#get_relevant_wall_normal_data_from_pandas_hdf(exceptions = ['z05','STE','z00'])
#do_the_time_resolved_analysis()
correlation_coherence_and_length_scale_analysis()
publish.publish()
                streamwise_correction     = case[1].x_corr,
            )

        df = tar.get_dimensionless_inner_variables(
            df,
            correction = 0,
            Cf = case[1].Cf,
        )

        df['x_loc'] = case[1].x_loc


        if time_resolved:
            bl_file.replace('.csv','_TR.csv') 
        tar.write_boundary_layers(df, boundary_layers_file = bl_file)

bl_file = "Boundary_layer_information.csv"
if isfile(bl_file):
    remove(bl_file)

#get_tr_streamlined_surface(z_loc = 0)

get_trailing_edge_for_all_cases_at_TE_m1()
#get_trailing_edge_for_all_cases_at_x_m1()

#get_trailing_edge_for_all_TR_cases_at_TE_m1()
#get_trailing_edge_for_all_TR_cases_at_x_m1()

#write_wall_normal_lines_to_csv()
publish.publish()
Example No. 24
#!/usr/bin/env python3
import os, re, publish

publish.publish("Debug", ['win-x64', 'linux-x64'])
Example No. 25
#!/usr/bin/env python3
import os, re, publish

publish.publish("Release", ['win-x64', 'linux-x64'])
Example No. 26
## Licensees holding valid Syntext Serna commercial licenses may use this file
## in accordance with the Syntext Serna Commercial License Agreement provided
## with the software, or, alternatively, in accordance with the terms contained
## in a written agreement between you and Syntext, Inc.
##
## GNU GENERAL PUBLIC LICENSE USAGE
## Alternatively, this file may be used under the terms of the GNU General
## Public License versions 2.0 or 3.0 as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL included in the packaging
## of this file. In addition, as a special exception, Syntext, Inc. gives you
## certain additional rights, which are described in the Syntext, Inc. GPL
## Exception for Syntext Serna Free Edition, included in the file
## GPL_EXCEPTION.txt in this package.
##
## You should have received a copy of appropriate licenses along with this
## package. If not, see <http://www.syntext.com/legal/>. If you are unsure
## which license is appropriate for your use, please contact the sales
## department at [email protected].
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##

from PublishingPlugin import PublishingPlugin

import sys

if __name__ == "__main__":
    from publish import publish
    publish(sys.argv)
Example No. 27
def send_message():
    msg = request.form.get('msg')
    return publish.publish(msg) or redirect(url_for("index"))
Example No. 28
def build(argv):

    doCatalogue = True
    doQuick = False
    doPublish = False
    try:
        opts, args = getopt.getopt(argv, "hcqp", [])
    except getopt.GetoptError:
        print 'build.py -h -c -q -p'
        print ''
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'Usage: -h -c -q -p'
            print ''
            print '  -c   Skip catalogue'
            print '  -p   Publish: auto commit and push to git'
            print '  -q   Quick build - skip assemblies, guide and catalogue'
            sys.exit()
        if opt in ("-c"):
            doCatalogue = False
        if opt in ("-q"):
            doQuick = True
            doCatalogue = False
        if opt in ("-p"):
            doPublish = True

    print("Build")
    print("-----")

    outfile = 'hardware.json'
    oldfile = 'backup.json'

    print("Backup current json...")
    oldjso = None
    if os.path.isfile(outfile) and not os.path.isfile(oldfile):
        os.rename(outfile, oldfile)

    errorlevel = 0

    errorlevel += parse_machines()

    if errorlevel == 0:
        errorlevel += vitamins()
    if errorlevel == 0:
        errorlevel += cut()
    if errorlevel == 0:
        errorlevel += printed()
    if errorlevel == 0 and not doQuick:
        errorlevel += assemblies()
    if errorlevel == 0:
        errorlevel += machines()

    if errorlevel == 0 and not doQuick:
        errorlevel += guides()

    if doCatalogue and not doQuick:
        catalogue()

    if errorlevel == 0 and doPublish > 0:
        publish()

    # if everything is ok then delete backup - no longer required
    if errorlevel == 0:
        os.remove(oldfile)

    try:
        if sys.platform == "darwin":
            check_output([
                'osascript', '-e',
                'display notification "Build Complete" with title "Build Process"'
            ])
    except Exception:
        print("Exception running osascript")

    print("")
    print("==============")
    print("Build Complete")

    return errorlevel
Example No. 29
# -*- coding: utf-8 -*-
from publish import publish

__author__ = '*****@*****.**'

if __name__ == '__main__':
    publish(u'狼人们别熬夜了,晚安')  # "Werewolves, stop staying up late. Good night."
Example No. 30
## Licensees holding valid Syntext Serna commercial licenses may use this file
## in accordance with the Syntext Serna Commercial License Agreement provided
## with the software, or, alternatively, in accordance with the terms contained
## in a written agreement between you and Syntext, Inc.
## 
## GNU GENERAL PUBLIC LICENSE USAGE
## Alternatively, this file may be used under the terms of the GNU General 
## Public License versions 2.0 or 3.0 as published by the Free Software 
## Foundation and appearing in the file LICENSE.GPL included in the packaging 
## of this file. In addition, as a special exception, Syntext, Inc. gives you
## certain additional rights, which are described in the Syntext, Inc. GPL 
## Exception for Syntext Serna Free Edition, included in the file 
## GPL_EXCEPTION.txt in this package.
## 
## You should have received a copy of appropriate licenses along with this 
## package. If not, see <http://www.syntext.com/legal/>. If you are unsure
## which license is appropriate for your use, please contact the sales 
## department at [email protected].
## 
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
## 

from PublishingPlugin import PublishingPlugin

import sys

if __name__ == "__main__":
    from publish import publish
    publish(sys.argv)
                print("The sensors are all authentic")
                if (len(
                        spoof_detect(a[["Sensor", "Type"]],
                                     b[["Sensor", "Type"]])) == 0):
                    print("		The types are all authentic")
                    if (len(
                            spoof_detect(a[["Sensor", "Type", "Units"]],
                                         b[["Sensor", "Type",
                                            "Units"]])) == 0):
                        print("			The units are all authentic")
                        sym_key()  # generate the symmetric key
                        benchmark()  # benchmark process
                        clearFiles()
                    else:
                        print("there are fake units")
                        publish("sensor_sym_key",
                                "abort")  # publish abort data message to IoTD
                        abort_process(a, b, start)
                        clearFiles()
                else:
                    print("there are fake categories")
                    publish("sensor_sym_key",
                            "abort")  # publish abort data message to IoTD
                    abort_process(a, b, start)
                    clearFiles()
            else:
                print("there are fake sensors")
                publish("sensor_sym_key",
                        "abort")  # publish abort data message to IoTD
                abort_process(a, b, start)
                clearFiles()
Example No. 32
import configparser
import logging

from extract import extract
from publish import publish
import util

if __name__ == "__main__":
    config = configparser.ConfigParser()
    config.read('config')

    logfile = util.get_conf_logfile(config, default='log')
    loglevel = util.get_conf_loglevel(config, default=logging.DEBUG)

    logger = logging.getLogger(__file__)
    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(filename)s:'
                                  '%(lineno)d(%(funcName)s) %(msg)s')

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)

    file_handler = logging.FileHandler(logfile)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    logger.setLevel(loglevel)

    logger.info('Starting win10_lockscreen_extract...')

    extract(config)
    arrange(config)
    publish(config)
Example No. 33
def build(argv):

    doCatalogue = True
    doQuick = False
    doPublish = False
    try:
        opts, args = getopt.getopt(argv,"hcqp",[])
    except getopt.GetoptError:
        print 'build.py -h -c -q -p'
        print ''
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'Usage: -h -c -q -p'
            print ''
            print '  -c   Skip catalogue'
            print '  -p   Publish: auto commit and push to git'
            print '  -q   Quick build - skip assemblies, guide and catalogue'
            sys.exit()
        if opt in ("-c"):
            doCatalogue = False
        if opt in ("-q"):
            doQuick = True
            doCatalogue = False
        if opt in ("-p"):
            doPublish = True

    print("Build")
    print("-----")

    outfile = 'hardware.json'
    oldfile = 'backup.json'

    print("Backup current json...")
    oldjso = None
    if os.path.isfile(outfile) and not os.path.isfile(oldfile):
        os.rename(outfile, oldfile)

    errorlevel = 0

    errorlevel += parse_machines()

    if errorlevel == 0:
        errorlevel += vitamins()
    if errorlevel == 0:
        errorlevel += cut()
    if errorlevel == 0:
        errorlevel += printed()
    if errorlevel == 0 and not doQuick:
        errorlevel += assemblies()
    if errorlevel == 0:
        errorlevel += machines()

    if errorlevel == 0 and not doQuick:
        errorlevel += guides()

    if doCatalogue and not doQuick:
        catalogue()

    if errorlevel == 0 and doPublish > 0:
        publish()


    # if everything is ok then delete backup - no longer required
    if errorlevel == 0:
        os.remove(oldfile)

    try:
        if sys.platform == "darwin":
            check_output(['osascript','-e','display notification "Build Complete" with title "Build Process"'])
    except Exception:
        print("Exception running osascript")

    print("")
    print("==============")
    print("Build Complete")

    return errorlevel
Example No. 34
def update(_):
    #print('update')
    (t, p, h) = sensors.measure(sens)
    publish.publish(pub, t, p, h)
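The single ignored argument suggests update() is a periodic timer callback, e.g. on a MicroPython board. One possible wiring, where the timer id and period are pure assumptions:

from machine import Timer

# Hypothetical scheduling: sample and publish once a minute.
# Timer(-1) requests a software timer on ports that support it.
Timer(-1).init(period=60000, mode=Timer.PERIODIC, callback=update)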
Example No. 35
def main():
    if setting.daemon:
        daemonize(setting.pid_file)
    
    formatter = logging.Formatter("[%(levelname)s@%(created)s] %(message)s")
    file_handler = logging.FileHandler(setting.log_file)
    file_handler.setFormatter(formatter)
    
    log = logging.getLogger(setting.log_hdr)
    
    log.setLevel(setting.log_level)
    log.addHandler(file_handler)
    
    if setting.log_level == logging.DEBUG:
        stdout_handler = logging.StreamHandler()
        stdout_handler.setFormatter(formatter)
        log.setLevel(setting.log_level)
        log.addHandler(stdout_handler)

    log.info("logger setting up")

    while True:
        c = cache.Cache()
        t_begin = time.time()
        post = xinpinla.crawler_page(c)
        publish_flag = False
        for k, l in post.items():
            post_time = k
            for d in l:
                title = d["title"]
                link = d["link"]
                desc = d["desc"]
                
                s = kr36.search(title)
                if s is None:
                    n = datetime.datetime.now()
                    n = "%04d-%02d-%02d %02d:%02d:%02d" % (n.year, n.month, n.day, n.hour, n.minute, n.second)
                    s = {"time":n, "title":title, "link":None, "detail":desc}
                
                post_dat = {"pg_time":post_time, "pg_title":title, "pg_link":link, "pg_desc":desc, \
                            "rst_time":s["time"], "rst_title":s["title"], "rst_link":s["link"], "rst_desc":s["detail"]}
                
                log.debug("title: = > " + post_dat["pg_title"])
                
                parse = html2markdown.Html2MarkdownParser()
                parse.feed(post_dat["rst_desc"].strip())
                detail = parse.get_markdown()
                log.info("hexo => " + post_dat["pg_title"])
                publish_flag = hexo.hexo(post_dat)
                if publish_flag:
                    log.info("hexo => " + post_dat["pg_title"] + " success")
                    log.debug("append sync: %s=%s" % (k, link))
                    c.append_sync_one(k, link)

        del c
        if publish_flag:
            hexo.post_hexo()
        
        publish.publish()
        
        t_end = time.time()
        
        log.info("it cost %s seconds for one single loop. start = %d, end = %d" % (t_end - t_begin, t_begin, t_end))        
        log.info("going to sleep %d seconds for next loop" % (setting.sleep_time, ))
        time.sleep(setting.sleep_time)

    log.info("done")
Example No. 36
import publish
import sys

min = 0
max = 0
if len(sys.argv) == 3:
    min = int(sys.argv[1])
    max = int(sys.argv[2])

def service_func():
    print 'Publishing...'

if __name__ == '__main__':
    service_func()
    for i in range(min, max + 1):
        publish.publish(i, i + 1)
Example No. 37
def set_args():
    global args
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "book",
        help=
        "Book to publish, where id is the directory where the book is located. Works only with very small books. For other purposes try to use GitHub pages or other available hosting services."
    )
    parser.add_argument("--shorten",
                        help="Shorten generated URL with bit.ly",
                        action='store_true')
    parser.add_argument(
        "--bypass",
        help=
        "Bypass size restriction (not recommended. only for debugging purposes)",
        action='store_true')
    args = parser.parse_args()


if __name__ == "__main__":
    set_args()

    book = Book(args.book)

    html_file = f"{args.book}/{args.book}.html"
    if not os.path.isfile(html_file):
        print(
            f"Build your book first with:\nmake html BOOK={args.book} OFFLINE=no"
        )
    else:
        publish(html_file, book.title, args.shorten, args.bypass)