def modify_fstab(directory):
  # Google Edition: replace the original /system, /cache and /data entries with hard-coded ones
  for i in [ 'fstab.qcom', 'fstab.jgedlte' ]:
    lines = c.get_lines_from_file(directory, i)

    has_cache_line = False

    f = c.open_file(directory, i, c.WRITE)
    for line in lines:
      if re.search(r"^/dev[a-zA-Z0-9/\._-]+\s+/system\s+.*$", line):
        c.write(f, "/dev/block/platform/msm_sdcc.1/by-name/system /raw-system ext4 ro,errors=panic wait\n")

      elif re.search(r"^/dev[^\s]+\s+/cache\s+.*$", line):
        c.write(f, "/dev/block/platform/msm_sdcc.1/by-name/cache /raw-cache ext4 nosuid,nodev,barrier=1 wait,check\n")
        has_cache_line = True

      elif re.search(r"^/dev[^\s]+\s+/data\s+.*$", line):
        c.write(f, "/dev/block/platform/msm_sdcc.1/by-name/userdata /raw-data ext4 nosuid,nodev,noatime,noauto_da_alloc,discard,journal_async_commit,errors=panic wait,check,encryptable=footer\n")

      else:
        c.write(f, line)

    if not has_cache_line:
      c.write(f, "/dev/block/platform/msm_sdcc.1/by-name/cache /raw-cache ext4 nosuid,nodev,barrier=1 wait,check\n")

    f.close()
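
These modify_* helpers all call into a small common module imported as c. The following is a minimal, hypothetical sketch of that interface, reconstructed only from the calls made in the examples (get_lines_from_file, open_file, write, whitespace, WRITE); the real module may differ.

import os
import re

WRITE = 'w'  # file mode constant passed to open_file

def get_lines_from_file(directory, filename):
    # Return the file's lines with their newlines so they can be written back verbatim.
    with open(os.path.join(directory, filename), 'r') as handle:
        return handle.readlines()

def open_file(directory, filename, mode):
    # Open a file inside the given directory; the caller closes it.
    return open(os.path.join(directory, filename), mode)

def write(handle, text):
    handle.write(text)

def whitespace(line):
    # Leading indentation of a line, used to keep inserted lines aligned with their neighbours.
    return re.match(r"\s*", line).group(0)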
Example 2
def modify_fstab(directory):
  lines = c.get_lines_from_file(directory, 'fstab.qcom')

  # For Android 4.2 ROMs
  has_cache_line = False

  f = c.open_file(directory, 'fstab.qcom', c.WRITE)
  for line in lines:
    if re.search(r"^/dev[a-zA-Z0-9/\._-]+\s+/system\s+.*$", line):
      c.write(f, re.sub("\s/system\s", " /raw-system ", line))

    elif re.search(r"^/dev[^\s]+\s+/cache\s+.*$", line):
      c.write(f, re.sub("\s/cache\s", " /raw-cache ", line))
      has_cache_line = True

    elif re.search(r"^/dev[^\s]+\s+/data\s+.*$", line):
      c.write(f, re.sub("\s/data\s", " /raw-data ", line))

    else:
      c.write(f, line)

  if not has_cache_line:
    c.write(f, "/dev/block/platform/msm_sdcc.1/by-name/cache          /raw-cache       ext4    nosuid,nodev,barrier=1 wait,check")

  f.close()
def modify_MSM8960_lpm_rc(directory):
  lines = c.get_lines_from_file(directory, 'MSM8960_lpm.rc')

  f = c.open_file(directory, 'MSM8960_lpm.rc', c.WRITE)
  for line in lines:
    if re.search(r"^\s+mount.*/cache.*$", line):
      c.write(f, re.sub(r"^", "#", line))

    else:
      c.write(f, line)

  f.close()
Example 4
def modify_init_qcom_rc(directory):
  lines = c.get_lines_from_file(directory, 'init.qcom.rc')

  f = c.open_file(directory, 'init.qcom.rc', c.WRITE)
  for line in lines:
    # Change /data/media to /raw-data/media
    if re.search(r"/data/media(\s|$)", line):
      c.write(f, re.sub('/data/media', '/raw-data/media', line))

    else:
      c.write(f, line)

  f.close()
Example 5
    def sync_cc_to_git(self):
        self.since = self.get_since()
        self.new_since = common.get_since_utc_time(self.core_config.get_since_delta())
        
        start_history = self.get_start_history(self.since)
        
        logger = logging.getLogger(__name__)
        logger_level = logger.getEffectiveLevel()
        if logger_level <= logging.DEBUG:
            common.write(join(self.ct.cc_dir, '.git', 'lsh_short.log'), start_history.encode(common.ENCODING))
            
        view_info = self.parse_start_history(start_history)
        
        # update cc view
        self.ct.update(True, join(self.ct.cc_dir, '.git', 'cc.updt'))
        if not self.repo.is_dirty():
            return False
        
        # restore the files in the cc view/gcc to their not-yet-updated state
        self.repo.git.checkout('.')
        self.repo.git.clean('-f', '-d', '-q')
        
        # fetch the updated history and do the real synchronization from the cc view to the git repo
        history = self.get_updated_history(self.since)
        if logger_level <= logging.DEBUG:
            common.write(join(self.ct.cc_dir, '.git', 'lsh_long.log'), history.encode(common.ENCODING))
        
        cs = self.parse_updated_history(history, view_info)
        cs = reversed(cs)
        cs = self.merge_history(cs)

        if not len(cs):
            return False
        
        self.log_groups(cs, logger)
    
        self.commit(cs)
    
        # update the sync.config file.
        self.update_since()
        
        return True
def write_informer(*, desc, title, expr, mode=""):
    # The RST labels below are Russian: Описание = Description, Заголовок = Title,
    # Условие = Condition, Тип = Type.
    com.write(f"""\
    ..
    .. list-table::
        :stub-columns: 1
        :widths: 1 50

        * - Описание
          - {com.reindent(3, desc)}
        * - Заголовок
          - ``{title}``
        * - Условие
          - ``{expr}``
    """)
    if not mode:
        return
    com.write(f"""\
    ..
        * - Тип
          - ``{mode}``
    """)
Example 7
def modify_init_qcom_rc(directory):
  lines = c.get_lines_from_file(directory, 'init.qcom.rc')

  f = c.open_file(directory, 'init.qcom.rc', c.WRITE)
  for line in lines:
    if 'export EMULATED_STORAGE_TARGET' in line:
      c.write(f, line)
      c.write(f, c.whitespace(line) + "export EXTERNAL_SD /storage/sdcard1\n")

    # Change /data/media to /raw-data/media
    elif re.search(r"/data/media(\s|$)", line):
      c.write(f, re.sub('/data/media', '/raw-data/media', line))

    else:
      c.write(f, line)

  f.close()
Example 8
    def open(self):
        LOG.debug("the client %s connected with compute socket", self.request.remote_ip)
        if self not in DashBoardHandler.clients:
            DashBoardHandler.clients.add(self)
        self.stream.set_nodelay(True)
        try:
            alarm_data = yield DashBoardHandler.get_alarm({})
            write(self, alarm_data.get("records"), "alarm")

            stat_data = yield DashBoardHandler.get_stat({})
            write(self, stat_data.get("records"), "stat")

            top_data = yield DashBoardHandler.get_top({})
            write(self, top_data.get("response"), "top")

            day = datetimeUtils.get_now_date(datetimeUtils.YEAR_MONTH_DAY)
            log_data = yield query_operation_log(region=CONF.keystone.region_name,
                                                 start_time=day + " 00:00:00",
                                                 limit=15)
            write(self, log_data, "log")
        except Exception as e:
            LOG.error("open socket push message error %s", e)
Example 9
            timestep_chunks.append((x, (chunk_start, chunk_end)))

    # parallelize the timestep chunks
    chunks = sc.parallelize(timestep_chunks, config.partitions)

    # Now calculate means
    chunks = chunks.map(calculate_partial_sums)

    # Now reduce
    sums = chunks.reduceByKey(reduce_partial_sums)

    # Now finally calculate the means
    timestep_means = sums.map(calculate_mean)

    timestep_means = sorted(timestep_means.collect())

    if config.output_path:
        timestep_means = np.ma.asarray([x[1] for x in timestep_means])
        path = os.path.join(
            config.output_path,
            os.path.basename(config.datafile_path).replace('.nc', '_means.nc'))
        write(path, data, '%s_mean' % config.parameter, timestep_means)
    else:
        for (_, m) in timestep_means:
            print(m[~m.mask])

    end_time = time.time()

    print "Time: %f" % (end_time - start_time)
Example 10
def merge(old_hash, new):
    # Both branches currently return the new catalog; the old hash is only checked, not merged.
    if len(old_hash) == 0:
        return new
    else:
        return new

# === START === #

start_time = time.time()
config = read_configuration()
old_catalog_hash = make_old_catalog_hash(config.catalog_file_name)
resolved_path = Path(config.catalog_directory).expanduser().resolve()
print("Reading {}".format(resolved_path))
new_catalog = make_catalog(resolved_path, config.flags)
catalog = merge(old_catalog_hash, new_catalog)

# writing to csv file
write(config.catalog_file_name, catalog)

# done in ...
time_total = time.time() - start_time
time_minutes = int(time_total / 60)
time_seconds = int(time_total % 60)
milli_seconds = int(time_total * 1000)
if time_minutes > 0:
    print("Done in {}m {}s".format(time_minutes, time_seconds))
elif time_seconds > 0:
    print("Done in {}s".format(time_seconds))
else:
    print("Done in {}ms".format(mili_seconds))
Example 12
def process_file(config, file_path):
    nc_file = Dataset(file_path)
    means = calculate_means(nc_file, config.parameter, config.timesteps)
    path = os.path.join(config.output_path, os.path.basename(file_path).replace('.nc', '_means.nc'))
    write(path, nc_file, '%s_mean' % config.parameter, means)
Example 13
                        type=int,
                        help='Number of timesteps to average over')
    parser.add_argument('-o', '--output_path')

    config = parser.parse_args()

    if os.path.isdir(config.data_path):
        files = glob.glob(os.path.join(config.data_path, '*.nc'))
    else:
        files = [config.data_path]

    start = time.time()

    for f in files:
        print(f)
        nc_file = Dataset(f)
        means = calculate_means(nc_file, config.parameter, config.timesteps)

        if config.output_path:
            path = os.path.join(
                config.output_path,
                os.path.basename(f).replace('.nc', '_means.nc'))
            write(path, nc_file, '%s_mean' % config.parameter, means)
        else:
            for m in means:
                print(m[~m.mask])

    end = time.time()

    print "Time: %f" % (end - start)
Example 14
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--data_path', required=True)
    parser.add_argument('-p', '--parameter', required=True)
    parser.add_argument('-n', '--timesteps', required=True, type=int, help='Number of timesteps to average over')
    parser.add_argument('-o', '--output_path')

    config = parser.parse_args()

    if os.path.isdir(config.data_path):
        files = glob.glob(os.path.join(config.data_path, '*.nc'))
    else:
        files = [config.data_path]

    start = time.time()

    for f in files:
        print(f)
        nc_file = Dataset(f)
        means = calculate_means(nc_file, config.parameter, config.timesteps)

        if config.output_path:
            path = os.path.join(config.output_path, os.path.basename(f).replace('.nc', '_means.nc'))
            write(path, nc_file, '%s_mean' % config.parameter, means)
        else:
            for m in means:
                print(m[~m.mask])

    end = time.time()

    print "Time: %f" % (end - start)
Example 15
def modify_init_rc(directory):
  lines = c.get_lines_from_file(directory, 'init.rc')

  f = c.open_file(directory, 'init.rc', c.WRITE)
  for line in lines:
    if 'export ANDROID_ROOT' in line:
      c.write(f, line)
      c.write(f, c.whitespace(line) + "export ANDROID_CACHE /cache\n")

    elif re.search(r"mkdir /system(\s|$)", line):
      c.write(f, line)
      c.write(f, re.sub("/system", "/raw-system", line))

    elif re.search(r"mkdir /data(\s|$)", line):
      c.write(f, line)
      c.write(f, re.sub("/data", "/raw-data", line))

    elif re.search(r"mkdir /cache(\s|$)", line):
      c.write(f, line)
      c.write(f, re.sub("/cache", "/raw-cache", line))

    # Comment out yaffs2 mount lines
    elif 'yaffs2' in line:
      c.write(f, re.sub(r"^", "#", line))

    else:
      c.write(f, line)

  f.close()
Example 16
def modify_init_target_rc(directory):
  lines = c.get_lines_from_file(directory, 'init.target.rc')

  previous_line = ""

  f = c.open_file(directory, 'init.target.rc', c.WRITE)
  for line in lines:
    # Comment out the wait / check_fs / mount lines that reference the cache partition
    if re.search(r"^\s+wait\s+/dev/.*/cache.*$", line):
      c.write(f, re.sub(r"^", "#", line))

    elif re.search(r"^\s+check_fs\s+/dev/.*/cache.*$", line):
      c.write(f, re.sub(r"^", "#", line))

    elif re.search(r"^\s+mount\s+ext4\s+/dev/.*/cache.*$", line):
      c.write(f, re.sub(r"^", "#", line))

    elif re.search(r"^\s+mount_all\s+fstab.qcom.*$", line) and \
        re.search(r"^on\s+fs\s*$", previous_line):
      c.write(f, line)
      c.write(f, c.whitespace(line) + "exec /sbin/busybox-static sh /init.dualboot.mounting.sh\n")

    else:
      c.write(f, line)

    previous_line = line

  f.close()
Example 17
#!/usr/bin/env python3

import sys
import os
from common import read, write, Const, append_suffix

if len(sys.argv) != 3:
    print("Use ./prune.py <dir-to-catalog> <catalog_file.csv>")
    exit(1)

catalog_directory = sys.argv[1]
catalog_file_name = sys.argv[2]

print("Reading {}/{}".format(os.getcwd(), catalog_file_name))

catalog = read(catalog_file_name)
catalog_pruned = []

for file in catalog:
    full_path = os.path.join(catalog_directory, file["path"],
                             file["file_name"])
    if os.path.exists(full_path):
        catalog_pruned.append(file)
    else:
        print(full_path + " was pruned")

catalog_pruned_file_name = append_suffix(catalog_file_name, Const.pruned)
write(catalog_pruned_file_name, catalog_pruned)