Example #1
    def __init__(self, Mp=params.get("LRS"), Mn=params.get("HRS"), delay=0):
        self.Mp = Mp
        self.Mn = Mn
        self.delay = [0] * delay
        self.VDD = params.get("VDD")
        self.VSS = params.get("VSS")
        self.MID = (self.VDD - self.VSS) / 2 + self.VSS
        self.cycles = params.get("cycles")
        self.G = np.ones(self.cycles) * (1 / self.Mp - 1 / self.Mn)
        self.activity = np.zeros(self.cycles)
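
One caveat worth noting: the defaults Mp=params.get("LRS") and Mn=params.get("HRS") are evaluated once, when the method is defined, so later changes to the parameter store are not picked up. A None-sentinel variant (a sketch, not the library's API) defers the lookup to call time:

    def __init__(self, Mp=None, Mn=None, delay=0):
        # Fall back to the current parameter values at call time instead of
        # freezing them at definition time.
        self.Mp = Mp if Mp is not None else params.get("LRS")
        self.Mn = Mn if Mn is not None else params.get("HRS")
        self.delay = [0] * delay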
Example #2
def lpr_serachDB(lpr_search):
    base_url = params.get("Elasticsearch", "lpr")
    url = base_url.format(10, lpr_search)
    found, list_dict = elastic_serachDB(url)
    #app.logger.info("Found = {} \n elastic_serachDB OUT = {}".format(found, list_dict))
    lpr_time_all = ""
    lpr = ""
    lpr_original = ""
    lpr_preview = ""
    old_time = ""
    if found > 0:
        for i, each_dict in enumerate(list_dict):
            if i == 0:
                lpr = each_dict["lpr"]
                lpr_original = format_image(each_dict["origin_file"])
                lpr_preview = format_image(each_dict["crop_file"])
                app.logger.info("LPR = {}".format(lpr))
            each_time = format_time(each_dict["time"])
            if old_time != each_time:
                lpr_time_all = "{}\n{}, {}".format(lpr_time_all, lpr, each_time)
                old_time = each_time
            #return found, lpr_time_all, lpr, lpr_original, lpr_preview
    app.logger.info("LPR Time = {}".format(lpr_time_all))
    app.logger.info("Found = {}".format(found))
    return found, lpr_time_all, lpr, lpr_original, lpr_preview
Example #3
def lpr_ai4thai(origin_file, out_file, crop_file):
    url = params.get('AI4Thai', 'lpr_url')
    payload = {'crop': '1', 'rotate': '1'}
    headers = {
        'Apikey': getAPIKey(),
    }
    # Open the image with a context manager so the file handle is closed
    # once the upload finishes.
    with open(crop_file, 'rb') as image:
        files = {'image': image}
        response = requests.post(url, files=files, data=payload,
                                 headers=headers)
    try:
        lpr = response.json()[0]["lpr"]
        print("AI4Thai LPR = " + lpr)
        data_dict = {
            "time": utils.getCurrentTime(),
            "lpr": lpr,
            "origin_file": origin_file,
            "out_file": out_file,
            "crop_file": crop_file,
        }
        #print(json.dumps(data_dict))
        es = db.connect()
        result = db.insert(es, json.dumps(data_dict), indexName="lpr")
        print("Elastic : Successful = {}\n-----------".format(
            result["_shards"]["successful"]))
    except Exception:
        print('LPR error: {}'.format(response.json().get("message")))
    return response
Example #4
def main():
  # Pull parameters from URL and POST data
  post_json = json.loads(params.get().getvalue("data")) # FIXME : not final
  store_id = params.get().getvalue("storeid") # FIXME : not final

  if int(store_id) in c.STORES:
    store_name = c.STORES[store_id][0]
  else:
    raise errors.StoreError()

  # Bring in the HTML template.
  parser = etree.HTMLParser()
  HTMLtree = etree.parse("/var/www/html/"+store_name+"_store_base.html", parser)


  # Create a list of strings representing content_item_ids that are entitled.
  entitled = []
  for entitled_id in post_json["entitled"]: # FIXME : not final
    entitled.append(str(entitled_id))

  # Create a list of strings representing content_item_ids that are installed.
  installed = []
  for installed_id in post_json["installed"]: # FIXME : not final
    installed.append(str(installed_id))


  # Change all the buttons on the page to their correct state:
  for entitled_item in entitled:
    if entitled_item not in installed:
      ent_links = HTMLtree.findall(".//a[@class='item"+entitled_item+"']")
      for ent_link in ent_links:
        ent_button = ent_link.find("div")
        ent_button.text = "Download"
        ent_button.set("class", ent_button.get("class")+" purchased_button")
  
  for installed_item in installed:
    inst_links = HTMLtree.findall(".//a[@class='item"+installed_item+"']")
    for inst_link in inst_links:
      inst_button = inst_link.find("div")
      inst_button.text = "Read"
      inst_button.set("class", inst_button.get("class")+" downloaded_button")
  
  print "Content-type: text/html\n"
  print etree.tostring(HTMLtree)
Example #5
def initpositionsnosurf(params):
    """Initialize positions of fluid particles with no surface."""

    nparfl = params['nparfl']
    lboxx = params['lboxx']
    lboxy = params['lboxy']
    lboxz = params['lboxz']
    rcinit = params['rcinit']
    sameseed = params['sameseed']
    rcinitsq = rcinit**2.0
    pos = np.empty([nparfl, 3])

    # excluded region: no fluid particles will be placed in this
    # region.  We default to False so that fluid particles are placed
    # throughout the whole box.
    exregion = params.get('exregion', False)
    exxmin = params.get('exxmin', 0.0)
    exxmax = params.get('exxmax', 0.0)
    exymin = params.get('exymin', 0.0)
    exymax = params.get('exymax', 0.0)
    exzmin = params.get('exzmin', 0.0)
    exzmax = params.get('exzmax', 0.0)

    pos[:, 0], pos[:, 1], pos[:, 2] = mcfuncs.initpositionsnosurff(
        nparfl, lboxx, lboxy, lboxz, rcinitsq,
        # excluded region
        exregion, exxmin, exxmax, exymin, exymax, exzmin, exzmax,
        sameseed)

    return pos
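
A usage sketch with an illustrative parameter dict (the values are made up, and mcfuncs must be importable):

fluid_params = {
    'nparfl': 100,
    'lboxx': 10.0, 'lboxy': 10.0, 'lboxz': 10.0,
    'rcinit': 1.0,
    'sameseed': True,
}
pos = initpositionsnosurf(fluid_params)  # (100, 3) array of positions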
Example #6
    def test_get(self):

        # If the needed file doesn't exist, create it using params.setup()
        user_fn = "params/user_params.json"
        if not os.path.exists(user_fn):
            params.setup()

        # Check return types of request for multiple parameters
        p1, p2, p3, p4, p5 = params.get("VDD", "VSS", "tper", "cycles", "cap")
        self.assertIsInstance(p1, float)
        self.assertIsInstance(p2, int)
        self.assertIsInstance(p3, float)
        self.assertIsInstance(p4, int)
        self.assertIsInstance(p5, float)

        # Check return type of request for one parameter
        p6 = params.get("VDD")
        self.assertIsInstance(p6, float)
Example #7
def agent(i, exp_queue, model_queue, metric_queue):
    device = torch.device('cuda:1')
    p = params.get()

    # --- Create environments ---
    env = gym.make('CartPole-v1')
    sdim = env.observation_space.shape
    adim = env.action_space.n

    # --- Create model ---
    model = Model(sdim[0], adim)

    # --- exp data ---
    states, statesn = [np.empty((p.HORIZON,) + sdim) for _ in range(2)]
    rewards, dones = [np.empty((p.HORIZON, 1)) for _ in range(2)]
    actions = np.empty((p.HORIZON, 1), dtype=np.int64)  # np.int was removed in NumPy 1.24

    episode_rewards = [0.]

    sn = env.reset()
    while True:
        # --- update model ---
        state_dict = model_queue.get()
        model.load_state_dict(state_dict)
        model.to(device)
        model.eval()

        # --- get exp ---
        for t in range(p.HORIZON):
            states[t] = sn.copy()
            actions[t] = model.action_sample(states[t])
            sn, rewards[t], dones[t], _ = env.step(actions[t][0])
            episode_rewards[-1] += rewards[t]

            if dones[t]:
                sn = env.reset()
                episode_rewards.append(0.)

            statesn[t] = sn.copy()

        # --- send exp ---
        exp_dict = {
            'states': states,
            'statesn': statesn,
            'rewards': rewards,
            'actions': actions,
            'dones': dones,
        }
        exp_queue.put(exp_dict)

        metric_dict = {
            'episode_rewards': episode_rewards[:-1],
        }
        metric_queue.put(metric_dict)
        episode_rewards = [0.]
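
Note that this rollout uses the pre-0.26 Gym API, where reset() returns only the observation and step() returns a 4-tuple. Under Gymnasium (or Gym >= 0.26) the equivalent calls look like this sketch:

import gymnasium as gym

env = gym.make('CartPole-v1')
sn, info = env.reset()
# step() now returns five values; "done" splits into terminated/truncated.
sn, reward, terminated, truncated, info = env.step(env.action_space.sample())
if terminated or truncated:
    sn, info = env.reset()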
Example #8
    def build(self, network_file):
        with open(network_file, "r") as f:
            lines = f.readlines()
        HRS, LRS = params.get("HRS", "LRS")
        # TODO --> Fix the indexing here
        for line in lines[1:18]:
            line = line.replace("\n", "")
            line = line.split(" ")

            # Read in a network
            if line[0] == "+":
                if line[1] == "I":
                    print("N: I")
                elif line[1] == "O":
                    print("N: O")

                unique_id = line[2]
                threshold = line[3]
                # TODO --> Fix the threshold calculation here
                threshold = 599e-3
                x = line[4]
                y = line[5]
                z = line[6]
                self.neuron_dict[unique_id] = IAF(name=unique_id,
                                                  Vth=threshold,
                                                  rf=1)

                if line[1] == "I":
                    input_neuron = IN("Input",
                                      self.input_arrays[int(unique_id)])
                    input_synapse = TM(Mp=LRS,
                                       Mn=HRS,
                                       delay=0,
                                       pre=input_neuron,
                                       post=self.neuron_dict[unique_id])
                    self.synapse_list.append(input_synapse)
                    self.neuron_dict[unique_id].input_synapses.append(
                        input_synapse)
                else:
                    pass

            elif line[0] == "|":
                pre = self.neuron_dict[line[3]]
                post = self.neuron_dict[line[4]]
                delay = int(line[5])
                mbits = int(line[6])
                Mp = LRS if mbits & 1 == 1 else HRS
                Mn = HRS if (mbits >> 1) & 1 == 1 else LRS
                syn = TM(Mp, Mn, delay, pre, post)
                self.synapse_list.append(syn)
                post.input_synapses.append(syn)

            else:
                pass
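
The two low bits of mbits select the programmed state of each memristor: bit 0 chooses Mp and bit 1 chooses Mn. A standalone worked example with illustrative resistance values:

# Hypothetical LRS/HRS resistances, for illustration only.
LRS, HRS = 1e4, 1e6

for mbits in range(4):
    Mp = LRS if mbits & 1 == 1 else HRS         # bit 0 -> Mp
    Mn = HRS if (mbits >> 1) & 1 == 1 else LRS  # bit 1 -> Mn
    print(mbits, Mp, Mn)
# mbits=0: Mp=HRS, Mn=LRS   mbits=1: Mp=LRS, Mn=LRS
# mbits=2: Mp=HRS, Mn=HRS   mbits=3: Mp=LRS, Mn=HRS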
Example #9
def central(exp_queues, model_queues, metric_queues, test_queue, tb_queue):
    device = torch.device('cuda:1')
    p = params.get()

    # --- Create Model ---
    env = gym.make('CartPole-v1')
    sdim = env.observation_space.shape
    adim = env.action_space.n
    model = Model(sdim[0], adim)

    while True:
        # --- Synchronize model: send the model to each agent ---
        model.to(torch.device('cpu'))
        state_dict = model.state_dict()
        for i in range(p.NUM_AGENTS):
            model_queues[i].put(state_dict)
        model.to(device)
        model.train()

        # --- Receive, compute, shuffle exp ---
        exp = train_func.merge_exp(exp_queues)
        exp['values'] = model.values(exp['states'])
        exp['advs'], exp['returns'] = train_func.compute_adv(exp, p=p)

        # --- Update model ---
        results = train_func.train_model(model, exp, p.NUM_UPDATE, p=p)
        r = SimpleNamespace(**results)
        p.NUM_UPDATE += 1

        # --- Send to tensorboard ---
        tbs = []
        tbs.append((TB_SCALAR, 'Loss(updates)/Total', r.total, p.NUM_UPDATE))
        tbs.append((TB_SCALAR, 'Loss(updates)/Actor', r.actor, p.NUM_UPDATE))
        tbs.append((TB_SCALAR, 'Loss(updates)/Critic', r.critic, p.NUM_UPDATE))
        tbs.append((TB_SCALAR, 'Loss(updates)/Entropy Coef', r.ent_coefs, p.NUM_UPDATE))
        tbs.append((TB_SCALAR, 'Loss(updates)/Entropy', r.entropies, p.NUM_UPDATE))
        tbs.append((TB_SCALAR, 'Loss(updates)/Returns', np.mean(exp['returns']), p.NUM_UPDATE))
        tbs.append((TB_SCALAR, 'Loss(updates)/Advantage', np.mean(exp['advs']), p.NUM_UPDATE))
        for d in tbs:
            tb_queue.put(d)

        metric_dict = train_func.merge_metric(metric_queues)
        metric = SimpleNamespace(**metric_dict)
        for i in range(len(metric.episode_rewards)):
            tb_queue.put((TB_SCALAR, 'Episode/Rewards', metric.episode_rewards[i], p.NUM_EPISODE))
            p.NUM_EPISODE += 1

        # --- update model param ---
        model.params = p.__dict__

        # --- test model ---
        if test_queue.empty():
            model.to(torch.device('cpu'))
            test_queue.put(copy.deepcopy(model))
Example #10
    def __init__(self, name="", refractory=0, cap=params.get("cap")):
        """Creates an Integrate-and-Fire neuron with the given parameters.

        Args:
            name       - A string uniquely identifying the neuron.
            refractory - The refractory period of the neuron (in cycles).
            cap        - The membrane capacitance of the neuron.
        """
        self.name = str(name)
        self.Vth = params.get("Vth")
        self.tper = params.get("tper")
        self.VDD = params.get("VDD")
        self.VSS = params.get("VSS")
        self.MID = (self.VDD - self.VSS)/2 + self.VSS
        self.cycles = params.get("cycles")
        self.Cmem = cap
        self.refractory = refractory
        self.refractory_cycles_left = 0

        # Neuron state information
        #self.Vmem = self.MID
        self.Vmem = np.ones(self.cycles) * self.MID
        self.fire = np.zeros(self.cycles)

        # Neuron firing flop
        self.firing_flop_state = 0
Example #11
def handle_pm(event):
    url = params.get("Elasticsearch", "lora_pm")
    found, list_dict = elastic_serachDB(url)
    #app.logger.info("found : {} , Data: {}".format(found,list_dict[0]["pm25"]))
    time = list_dict[0]["time"]
    pm = list_dict[0]["pm25"]
    temp = list_dict[0]["temperature"]
    hum = list_dict[0]["humidity"]
    output_text = "Date: {}\n----------------------\nPM 2.5 = {} ug/m3\nHumidity= {} %\nTemperature = {} Celcius".format(
        format_time(time), pm, hum, temp)
    app.logger.info(output_text)
    detail_text = "More: https://totsmartcity.com/pm"
    line_bot_api.reply_message(
        event.reply_token,
        [TextSendMessage(text=output_text),
         TextSendMessage(text=detail_text)])
Example #12
    def __init__(self,
                 Mp=params.get("LRS"),
                 Mn=params.get("HRS"),
                 delay=0,
                 pre=None,
                 post=None):
        self.Mp = Mp
        self.Mn = Mn
        self.delay = delay
        self.pre = pre
        self.post = post
        self.Gmax = 1 / params.get("LRS") - 1 / params.get("HRS")
        self.G = np.ones(params.get("cycles")) * self.Gmax
        self.activity = np.zeros(params.get("cycles"))

        self.VDD = params.get("VDD")
        self.VSS = params.get("VSS")
        self.GND = (self.VDD - self.VSS) / 2 + self.VSS
Example #13
def auto_fill():
    """
    ============================================================================
     Description: AutoFill Excel-Questions Columns (Id, Valid, Priority, Date).
    ============================================================================
    """
    row_first = 2
    col_id = 1
    col_valid = 2
    col_priority = 3
    col_question = 4
    col_date = 7
    date = u_datetime.to_str(datetime.now(), 'yymmdd')
    id_last = params.get('id last')
    filepaths = u_excel.get_filepaths_questions()
    for xlsx in filepaths:
        changed = False
        excel = Excel(xlsx)
        row_last = excel.ws.max_row
        for row in range(row_first, row_last + 1):
            if excel.is_blank(row, col_question):
                continue
            # AutoFill Id
            if excel.is_blank(row, col_id):
                excel.set_value(row, col_id, id_last + 1)
                id_last += 1
                changed = True
            # AutoFill Valid
            if excel.is_blank(row, col_valid):
                excel.set_value(row, col_valid, 1)
                changed = True
            # AutoFill Priority
            if excel.is_blank(row, col_priority):
                excel.set_value(row, col_priority, 'A')
                changed = True
            # AutoFill Date
            if excel.is_blank(row, col_date):
                excel.set_value(row, col_date, date)
                changed = True
        excel.close()
        if changed:
            print(xlsx)
    params.set('id last', id_last)
Example #14
    def build_from_file(self, network_file):
        with open(network_file, "r") as f:
            lines = f.readlines()
        HRS, LRS = params.get("HRS", "LRS")

        for line in lines[1:18]:
            line = line.replace("\n", "")
            line = line.split(" ")

            # Read a network
            if line[0] == "+":
                if line[1] == "I":
                    print("N: I")
                elif line[1] == "O":
                    print("N: O")

                unique_id = line[2]
            elif line[0] == "|":
                pass
            else:
                pass
Example #15
size = comm.Get_size()
topology = params.calculate_topology(size)
cart = MPI.Intracomm(comm).Create_cart([topology[0], topology[1]],
                                       [True, True])
rank = cart.Get_rank()

# calculate neighbors
coord = cart.Get_coords(rank)
neighbor_coords = [[coord[0] - 1, coord[1]], [coord[0], coord[1] - 1],
                   [coord[0] + 1, coord[1]], [coord[0], coord[1] + 1]]
neighbors = set(map(cart.Get_cart_rank, neighbor_coords))
print('rank: {}, neighbors: {}'.format(rank, neighbors))

# read parameters
(term_m, term_v, pop_size, f_out, f_model, migr_int, migr_size,
 mut_prob, mut_eta, xover_prob, xover_eta) = params.get()

# start multiple runs
start = time.time()
# -- setup algorithm --

# init evaluator
model = JspModel(f_model)
evaluator = JspEvaluator(model)
solution_length = model.solution_length()

# init GA
fitness_size = evaluator.metrics_count()
weights = (-1,) * fitness_size
creator.create("FitnessMin", base.Fitness, weights=weights)
creator.create("Individual", JspSolution, fitness=creator.FitnessMin)
Example #16
import requests
from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix

import params

from linebot import (LineBotApi, WebhookHandler)
from linebot.exceptions import (InvalidSignatureError)
from linebot.models import (
    MessageEvent,
    TextMessage,
    TextSendMessage,
    ImageSendMessage,
)

app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_host=1, x_proto=1)

token = params.get("Line", "token")
secret = params.get("Line", "secret")
line_bot_api = LineBotApi(token)
handler = WebhookHandler(secret)


@app.route('/')
def main():
    return "Python Line!"


@app.route('/test')
def test():
    return "Python Test!"

Example #17
            fits = map(lambda x: operators.calc_fitness(x, self.evaluator),
                       offspring)

            for fit, i_off in zip(fits, offspring):
                i_off.fitness.values = fit

            # select individuals for the next generation
            offspring.extend(population)
            population = toolbox.select(offspring, len(population))

        return population


if __name__ == '__main__':

    (term_m, term_v, pop, f_out, f_model, _, _, mut_pb,
     mut_eta, xover_pb, xover_eta) = params.get()

    start = time.time()

    alg = NSGA2(f_model)
    population = alg.optimize(term_m, term_v, pop, mut_pb, mut_eta, xover_pb,
                              xover_eta)

    duration = time.time() - start

    output.write_pareto_front(population, f_out)

    with open('{}.time'.format(f_out), 'a') as myfile:
        myfile.write('{}\n'.format(duration))
Example #18
import copy
import datetime

import torch

import params
import train_func

from types import SimpleNamespace
from torch.utils.tensorboard import SummaryWriter

from model import Model

TB_SCALAR       = 'scalar'
TB_HIST         = 'histogram'

torch.set_default_tensor_type(torch.DoubleTensor)

p = params.get()

def tensorboard(tb_queue):
    stime = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    dir_path = '../tensorboard/' + 'CartPole-A2C_' + stime
    writer = SummaryWriter(dir_path)

    writer.add_text('hparams', str(params.data), 0)

    while True:
        tbtype, tag, value, step = tb_queue.get()
        if tbtype == TB_SCALAR:
            writer.add_scalar(tag, value, step)
        elif tbtype == TB_HIST:
            writer.add_histogram(tag, value, step)
Example #19
def getAPIKey():
    apikey = params.get('AI4Thai', 'Apikey')
    return apikey
Example #20
if args.image:
    # Open the image file
    if not os.path.isfile(args.image):
        print("Input image file ", args.image, " doesn't exist")
        sys.exit(1)
    video_source = args.image
    outputFile = args.image[:-4] + '_yolo_out_py.jpg'
elif args.video:
    # Open the video file
    if not os.path.isfile(args.video):
        print("Input video file ", args.video, " doesn't exist")
        sys.exit(1)
    video_source = args.video
    outputFile = args.video[:-4] + '_yolo_out_py.avi'
elif args.rtsp:
    rtsp_stream = params.get('RTSP', args.rtsp)
    video_source = rtsp_stream
    outputFile = args.rtsp + '_yolo_out_py.avi'
else:
    # Webcam input
    video_source = 0

cap = cv.VideoCapture(video_source)

# Get the video writer initialized to save the output video
#if (not args.image):
# vid_writer = cv.VideoWriter(outputFile, cv.VideoWriter_fourcc('M','J','P','G'), 30, (round(cap.get(cv.CAP_PROP_FRAME_WIDTH)),round(cap.get(cv.CAP_PROP_FRAME_HEIGHT))))

skipFrame = int(params.get("RTSP", "skip"))  # number of frames to skip between processed frames
frameNo = 0
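
A capture loop honoring skipFrame might look like the following (a sketch; process_frame is a hypothetical stand-in for the detection and annotation step):

while cv.waitKey(1) < 0:
    has_frame, frame = cap.read()
    if not has_frame:
        break
    frameNo += 1
    # Process only every (skipFrame + 1)-th frame.
    if skipFrame and frameNo % (skipFrame + 1) != 0:
        continue
    process_frame(frame)  # hypothetical detection/annotation step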
Example #21
import os

import pandas as pd
import scipy.io

import model
import params

print("begin")
path = "/media/carlos/CE2CDDEF2CDDD317/concursos/cancer/stage1_100_100_200_test/"
files = [path + fname for fname in os.listdir(path)]
features_file = "data/resp_18000"
sub_file = "data/stage1_submission_6.csv"

r = []
r.append(["id", "cancer"])

features = scipy.io.loadmat(features_file)
parameters = params.get()
preds = model.eval_conv(files, parameters, features)

a = []
for pred in preds:
    name, prob = pred
    d = {}
    d["id"] = name.split("/")[-1:][0].split("_")[0]
    d["cancer"] = prob[0][1]
    a.append(d)

df = pd.DataFrame(a)
df = df.groupby("id").mean()
print(df)
df.to_csv(sub_file)
Example #22
    def reset(self):
        self.G = np.ones(params.get("cycles")) * self.Gmax
        self.activity = np.zeros(params.get("cycles"))
Example #23
# Import Elasticsearch package
from elasticsearch import Elasticsearch
import params

database = params.get('Elasticsearch', 'database')
port = params.get('Elasticsearch', 'port')

# Create the index "lora" first, e.g. via http://localhost:9200/lora


# Connect to the elastic cluster
def connect():
    es = Elasticsearch([{'host': database, 'port': port}])
    #print(es)
    return es


# Insert
def insert(es, data, indexName="lora"):
    #print(data)
    #Now let's store this document in Elasticsearch
    res = es.index(index=indexName, doc_type="lora", body=data)
    return res
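
Typical usage of these helpers (a sketch; the payload is illustrative and assumes import json):

es = connect()
doc = json.dumps({"time": "2020-01-01 00:00:00", "pm25": 12})
res = insert(es, doc, indexName="lora")
print(res["result"])  # e.g. "created"
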
Example #24
def main():
  # Pull parameters from URL and POST data
  store_id = params.get().getvalue("storeid") # FIXME : not final

  if int(store_id) in c.STORES:
    store_name = c.STORES[store_id][0]
    bundle_id = c.STORES[store_id][1]
  else:
    raise errors.StoreError()

  # Bring in the HTML template.
  parser = etree.HTMLParser()
  HTMLtree = etree.parse("/var/www/html/"+store_name+"_template.html", parser)
  legacyHTMLtree = etree.parse("/var/www/html/legacy_"+store_name+"_template.html", parser)


  # Create db connection for fetching promotions.
  dB = db_connect.OpenDB()
  
  # Create and make query to retrieve data for the currently stored issues.
  items_query = """
    SELECT *
    FROM content_items
    WHERE bundle_id = '%s'
    ORDER BY publication_date DESC
  """ % (bundle_id)
  items_results = dB.SelectQuery(items_query)
  
  # Create and make query to retrieve preview image urls.
  previews_query = """SELECT content_item_id, image_url FROM previews"""
  previews_results = dB.SelectQuery(previews_query)
  
  # Close db connection.
  dB.Close()


  # Create a workable dictionary of the preview image query.
  previews_dict = datautils.workablepreviews(previews_results)

  # Pop off the first item in the result as the featured item.
  featured_item = datautils.singleissue(items_results.pop(0), previews_dict)
  
  # Get a list of objects from the result, allowing the issue metadata to be 
  # accessed by name rather than index in the query results.
  previous_issues = datautils.listissues(items_results, previews_dict)


  # Find the featured section:
  featured_holder = HTMLtree.find(".//div[@id='featured_issue']")
  legacy_featured_holder = legacyHTMLtree.find(".//div[@id='featured_issue']")
  
  # Find the preview section:
  issue_holder = HTMLtree.find(".//div[@id='issue_float_holder']")
  legacy_issue_holder = legacyHTMLtree.find(".//div[@id='issue_float_holder']")
  
  # Find the lightbox section:
  details_holder = HTMLtree.find(".//div[@id='hide_details']")
  legacy_details_holder = legacyHTMLtree.find(".//div[@id='hide_details']")


  # Build the featured issue section.
  featured_id = str(featured_item.info["id"])

  if featured_item.info["price_tier"] > 0:
    featured_action_string = "Buy $"+str(float(featured_item.info["price_tier"]) - 0.01)
  else:
    featured_action_string = "Download"

  featured_holder.append(
    A(
      IMG(
        ID("featured_cover"),
        SRC(featured_item.info["cover_url"])
      ),
      HREF("#frame"+featured_id),
      CLASS("show_lightbox")
    )
  )
  
  if featured_item.info["subtitle"] is not None:
    featured_subtitle = makecleanelement("<p>",featured_item.info["subtitle"],"</p>")

  featured_holder.append(
    DIV(
      DIV(
        H1(makeunicode(featured_item.info["title"])),
        featured_subtitle,
        ID("featured_desc")
      ),
      DIV(
        A(
          DIV(
            featured_action_string,
            CLASS("feature_button","action_button","buy_button")
          ),
          HREF("sm://itemOpen/"+featured_id),
          CLASS("item"+featured_id)
        ),
        A(
          DIV(
            "Details",
            CLASS("feature_button","action_button","details_button")
          ),
          HREF("#frame"+featured_id),
          CLASS("show_lightbox")
        ),
        ID("featured_buttons")
      ),
      ID("featured_details")
    )
  )
  
  details_holder.append(builders.buildlightbox(featured_item, featured_action_string))
  

  # Build the list of previous issues and the detail panels for each.  
  for issue in previous_issues:
    previous_id = str(issue.info["id"])

    if issue.info["price_tier"] > 0:
      issue_action_string = "Buy $"+str(float(issue.info["price_tier"]) - 0.01)
    else:
      issue_action_string = "Download"
  
    # Append an issue element to the main scrollable list of previous issues.
    issue_holder.append(
      DIV(
        DIV(
          A(
            IMG(SRC(issue.info["thumbnail_url"])),
            HREF("#frame"+previous_id),
            CLASS("show_lightbox")
          ),
          CLASS("picture")
        ),
        DIV(makeunicode(trimtext(issue.info["title"], 20)), CLASS("desc_picture")),
        DIV(
          A(
            DIV(
              issue_action_string,
              CLASS("left_button","action_button","buy_button")
            ),
            HREF("sm://itemOpen/"+previous_id),
            CLASS("item"+previous_id)
          ),
          A(
            DIV(
              "Details",
              CLASS("right_button","action_button","details_button")
            ),
            HREF("#frame"+previous_id),
            CLASS("show_lightbox")
          ),
          CLASS("button_holder")          
        ),
        CLASS("issue")
      )
    )
    
    details_holder.append(builders.buildlightbox(issue, issue_action_string))

  # NOTE: these assignments only rebind local names; legacyHTMLtree itself is
  # not modified, and both files below are written from HTMLtree.
  legacy_featured_holder = featured_holder
  legacy_issue_holder = issue_holder
  legacy_details_holder = details_holder


  # Write the generated HTML to a temporary file. Using temporary files prevents race conditions.
  data = etree.tostring(HTMLtree)
  tmp = tempfile.mkstemp(dir=TEMP_BASE)
  legacy_tmp = tempfile.mkstemp(dir=TEMP_BASE)

  bytes_written = os.write(tmp[0], data)
  os.write(legacy_tmp[0], data)
  os.close(tmp[0])
  os.close(legacy_tmp[0])

  # Move the temporary file to the final URL.
  # FIXME : do we need to fix permissions here?
  os.rename(tmp[1], STORE_BASE % (store_name))
  os.rename(legacy_tmp[1], LEGACY_BASE % (store_name))

  return bytes_written