Example #1
def get_conf(url, download_dir, useThreads):
   data_path   = download_dir + '/download_data'
   json_path   = download_dir + '/download.json'
   hashes_path = download_dir + '/hashes.json'
   hashes      = None
   resuming    = True

   threadQueue = None
   if useThreads:
      threadQueue = ThreadQueue()

   if not file_exists(download_dir):
      os.mkdir(download_dir)

   conf_new = json_from_url(url)
   conf = shellutils.read_json(json_path)
   if conf is None:
      logger.info('No local download.json file found. Downloading download.json from server.')
      conf = conf_new
      resuming = False
   else:
      logger.info('download.json read successfully')

   if not file_exists(hashes_path):
      logger.info('No local hashes.json file found.')
      resuming = False
   else:
      logger.info('Local hashes file found.')

   new_conf_ver = int(conf_new['version'])
   conf_ver = int(conf['version'])
   if new_conf_ver > conf_ver or new_conf_ver == -1:  #a server version of -1 always forces a fresh download
      conf = conf_new
      logger.info('download.json on server is newer than local version. Restarting download process.')
      resuming = False
   else:
      logger.info('Local download.json is the newest version.')
   #have good conf now

   raw_f_name = conf['raw-file']
   raw_path = download_dir + '/' + raw_f_name
   raw_url = '/'.join(url.split('/')[:-1]) + '/' + raw_f_name
   #bro_url = raw_url + '.brotli'
   logger.info('raw file url: %s' % raw_url)

   #compress = int(conf['compression'])
   #if compress != 0 and compress != 1:
   #   logger.critical('Unsupported compression type: %i.' % compress)
   #if compress == 1:
   #   import brotli
   #   url_to_download = bro_url

   return conf, raw_path, json_path, data_path, hashes_path, hashes, resuming, threadQueue, raw_url
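
Neither json_from_url nor ThreadQueue is defined in these snippets. A minimal sketch of what the code above assumes they do; the requests-based fetch and the single-worker queue are assumptions, not the project's actual implementations:

import threading
from queue import Queue

import requests


def json_from_url(url):
    #Fetch and decode a JSON document; raises on HTTP errors.
    r = requests.get(url, verify=True, allow_redirects=True)
    r.raise_for_status()
    return r.json()


class ThreadQueue:
    #Single background worker draining a FIFO of queued (func, args) tasks.
    def __init__(self):
        self.tasks = Queue()
        threading.Thread(target=self._run, daemon=True).start()

    def _run(self):
        while True:
            func, args = self.tasks.get()
            try:
                func(*args)
            finally:
                self.tasks.task_done()

    def add_task(self, func, *args):
        self.tasks.put((func, args))

    def join(self):
        #Block until every queued task has been processed.
        self.tasks.join()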
Example #2
def get_working_dir(self):
    if not self.is_running():
        return None
    path = u.expand_link(u.join('/proc', self.pid, 'cwd'))
    if u.file_exists(path):
        return path
    else:
        return None
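
The u helper module is not shown in these examples. A minimal sketch of the three helpers this method relies on, assuming they are thin wrappers over os (an assumption, not the project's code):

import os

def join(*parts):
    #Build a path, tolerating non-string parts such as an integer pid.
    return os.path.join(*(str(p) for p in parts))

def expand_link(path):
    #Resolve a symlink such as /proc/<pid>/cwd; raises OSError if the
    #link is gone or unreadable, which the callers here guard against.
    return os.readlink(path)

def file_exists(path):
    return os.path.exists(path)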
Example #3
def download(json_url, download_dir, onProgress, useThreads=False): #onComplete?
   conf, raw_path, json_path, data_path, hashes_path, hashes, resuming, threadQueue, raw_url = get_conf(json_url, download_dir, useThreads)

   write_block = write_block_
   if threadQueue is not None:
      write_block = lambda *args: threadQueue.add_task(write_block_, *args)

   if not resuming:
      shellutils.write_json(json_path, conf)
      hashes = json_from_url(json_url + '.hashes')
      shellutils.write_json(hashes_path, hashes)
      if file_exists(raw_path):
         shellutils.rm(raw_path)
   else:
      hashes = shellutils.read_json(hashes_path)

   progress = conf['progress']
   num_hashes = conf['num-hashes']
   blk_size = conf['block-size']

   final_file = open(raw_path, 'ab')  #append mode: blocks are fetched and written strictly in order
   while progress < num_hashes:
      #bad_block is a module-level flag, set by write_block_ on a hash mismatch
      if bad_block is not None:
         msg = 'Bad hash of download chunk %s' % bad_block
         logger.critical(msg)
         raise Status(status.DOWNLOAD, msg)

      offset = progress * blk_size
      end = offset + blk_size - 1

      header = { 'Range': 'bytes=%d-%d' % (offset, end) }
      #stream=False lets requests read the whole chunk at once, which is faster here
      r = requests.get(raw_url, headers=header, stream=False, verify=True, allow_redirects=True)

      if r.status_code != 206:
         msg = 'Unexpected status code %i when requesting chunk (expected 206 Partial Content)' % r.status_code
         raise Status(status.DOWNLOAD, msg)
      data = r.content
      r.close()
      write_block(data, hashes, final_file, offset, conf, progress, json_path)
      progress = progress + 1
      onProgress(progress, num_hashes)

   if threadQueue is not None:
      logger.info('Waiting on writer threads')
      threadQueue.join()
      logger.info('Writer threads are done')

   #final_file.flush()
   final_file.close()

   if bad_block is None:
      logger.info('Download done!')
      return conf, raw_path, json_path
   else:
      msg = 'Have bad block %s' % bad_block
      logger.info(msg)
      raise Status(status.DOWNLOAD, msg)
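
write_block_ and the bad_block flag are referenced above but not shown. A minimal sketch under two assumptions: hashes is a list of hex digests indexed by block number, and the digests are SHA-256 (shellutils.write_json is the project helper seen throughout these examples):

import hashlib

bad_block = None  #set to the failing block index on a hash mismatch

def write_block_(data, hashes, final_file, offset, conf, progress, json_path):
    global bad_block
    #Verify the block against its expected digest before committing it.
    if hashlib.sha256(data).hexdigest() != hashes[progress]:
        bad_block = progress
        return
    #The file is opened in append mode and blocks arrive strictly in
    #order, so a plain write lands the data at `offset`.
    final_file.write(data)
    #Persist progress so an interrupted download can resume here.
    conf['progress'] = progress + 1
    shellutils.write_json(json_path, conf)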
Example #4
def get_exe_path(self):
    if not self.is_running():
        return None
    try:
        path = u.expand_link(u.join('/proc', self.pid, 'exe'))
    except OSError:  #the link may vanish or be unreadable if the process exits
        return None
    if u.file_exists(path):
        return path
    else:
        return None
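
A hypothetical usage of the two /proc-based methods above, assuming they live on a Process class constructed from a pid (the class itself is not shown):

import os

p = Process(pid=os.getpid())  #Process is hypothetical here
print(p.get_working_dir())    #e.g. the directory you launched from
print(p.get_exe_path())       #e.g. /usr/bin/python3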
Example #5
def get_conf(serv_url, download_dir):
   if not file_exists(download_dir):
      shellutils.mkdir(download_dir)

   conf, latest_conf = get_latest_json(serv_url, download_dir, conf_fname)
   hashes, latest_hashes = get_latest_json(serv_url, download_dir, hashes_fname)

   resuming = False
   if latest_conf and latest_hashes:
      resuming = True
      logger.info('Everything is up to date. Resuming.')
   else:
      logger.info('Files outdated. Restarting.')

   raw_f_name = conf['raw-file']
   raw_path = join(download_dir, raw_f_name)

   raw_url = urljoin(serv_url, raw_f_name)
   logger.info('raw file url: %s' % raw_url)

   #todo: left here -- json_path, data_path, hashes_path, and threadQueue are not defined yet in this refactor
   return conf, raw_path, json_path, data_path, hashes_path, hashes, resuming, threadQueue, raw_url
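
get_latest_json is not shown either. A sketch of the behavior this refactor appears to rely on, reusing the 'version' convention from Example #1 (fetch the server copy, keep whichever is newer, report whether the local file was already current); the body below is an assumption:

def get_latest_json(serv_url, download_dir, fname):
    local_path = join(download_dir, fname)
    server_doc = json_from_url(urljoin(serv_url, fname))
    local_doc = shellutils.read_json(local_path)
    #Keep the local copy only if it exists and is at least as new.
    if local_doc is not None and int(local_doc['version']) >= int(server_doc['version']):
        return local_doc, True
    shellutils.write_json(local_path, server_doc)
    return server_doc, False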
Example #6
import sys

import sh
import shellutils

inputs = {
    #'DSC_5747.MOV' : 'Hack1_income_vs_assets.mp4',
    #'DSC_5748.MOV' : 'Hack2_habits.mp4',
    #'DSC_5751.MOV' : 'Hack3_revocable_living_trust.mp4',
    #'DSC_5752.MOV' : 'Hack4_bigfoot.mp4',
    'DSC_5754.MOV': 'Hack5_401K_help.mp4',
    #'DSC_5755.MOV' : 'Hack6_cost_segregation.mp4',
    #'DSC_5756.MOV' : 'Hack7_tax_14_day.mp4',
    'DSC_5758.MOV': 'Hack8_kiddie_tax.mp4',
    #'DSC_5759.MOV' : 'Hack9_medicaid_nursing_home.mp4'
}

bad = False
for in_name in inputs:
    if not shellutils.file_exists(in_name):
        bad = True
        print('Missing input file: %s' % in_name)

if bad:
    sys.exit(1)  #non-zero exit so callers can detect the failure


def print_out(line):
    #print(line)
    pass


convert_cmd = sh.Command('./convert.sh')

for in_name in inputs:
    out_name = inputs[in_name]
    #The original loop body is truncated here; a plausible call, assuming
    #convert.sh takes <input> <output> and letting sh stream each output
    #line to the print_out callback:
    convert_cmd(in_name, out_name, _out=print_out)
Example #7
def download(url, download_dir, onProgress, useThreads=False):
    conf, raw_path, json_path, data_path, hashes_path, hashes, resuming, threadQueue, url_to_download = get_conf(
        url, download_dir, useThreads)

    if not resuming:
        if file_exists(data_path):
            shutil.rmtree(data_path)
        os.mkdir(data_path)
        write_conf(json_path, conf)
        hashes = json_from_url(url + '.hashes')
        write_conf(hashes_path, hashes)
    else:
        hashes = read_conf(hashes_path)

    chunk_status, progress_tracker = check_data(data_path, hashes)
    block_size = conf['block-size']
    num_hashes = conf['num-hashes']
    download_done = False

    while not download_done:
        for x in chunk_status:
            if x[1] is True:
                continue

            start = block_size * get_chunk_index(chunk_status, x[0])
            end = start + block_size - 1
            resume_header = {'Range': 'bytes=%d-%d' % (start, end)}
            r = requests.get(url_to_download,
                             headers=resume_header,
                             stream=True,
                             verify=True,
                             allow_redirects=True)

            if r.status_code != 206:
                err = 'Unexpected status code %i when requesting chunk (expected 206 Partial Content)' % r.status_code
                logger.critical(err)
                raise Exception(err)

            data = b''
            for c in r.iter_content(block_size):
                data += c

            #if compress == 1:
            #   data = brotli.decompress(data)
            chunk_hash = check_and_write_chunk(hashes, x[0], data, data_path,
                                               threadQueue)
            if chunk_hash is not None:
                set_chunk_status(chunk_status, chunk_hash, True)
                progress_tracker = progress_tracker + 1  #count the chunk before reporting so the bar can reach 100%
                onProgress(progress_tracker, num_hashes)
            r.close()

        download_done = True
        for x in chunk_status:
            if x[1] is False:
                download_done = False

    logger.info('Download done! Now putting stuff together')
    if threadQueue is not None:
        threadQueue.join()
    with open(raw_path, 'wb') as final_file:
        for x in hashes:
            with open(data_path + '/' + str(x), 'rb') as chunk:
                final_file.write(chunk.read())

    return conf, raw_path, json_path
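
check_data, get_chunk_index, set_chunk_status, and check_and_write_chunk are not shown. A sketch of check_data under the same assumptions as before (one file per chunk in data_path, named by its hash; SHA-256 digests):

import hashlib
import os

def check_data(data_path, hashes):
    #Returns ([hash, done] pairs in download order, number already done).
    chunk_status = []
    done = 0
    for h in hashes:
        chunk_path = os.path.join(data_path, str(h))
        ok = False
        if os.path.exists(chunk_path):
            with open(chunk_path, 'rb') as f:
                ok = hashlib.sha256(f.read()).hexdigest() == h
        if ok:
            done += 1
        chunk_status.append([h, ok])
    return chunk_status, done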