Example #1
def cmd_parse(args: argparse.Namespace):
    uri = args.uri
    _all = args.all

    fp = None
    if re.match(r"^https?://", uri):
        try:
            import httpio
        except ImportError:
            print("Install missing lib: httpip")
            retcode = subprocess.call(
                [sys.executable, '-m', 'pip', 'install', '-U', 'httpio'])
            assert retcode == 0
            import httpio

        fp = httpio.open(uri, block_size=-1)
    else:
        assert os.path.isfile(uri)
        fp = open(uri, 'rb')

    try:
        ir = IPAReader(fp)
        ir.dump_info(all=_all)
    finally:
        fp.close()
def main():
    # ir = IPAReader("../??.ipa")
    # print(ir.get_bundle_id())
    url = "???"  # FIXME(ssx): here need a public ipa package
    import httpio
    with httpio.open(url, block_size=-1) as fp:
        ir = IPAReader(fp)
        print(ir.get_bundle_id())
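Both functions above rely on the same idea: IPAReader (like zipfile) only needs a seekable file-like object, so a local file and an HTTP(S) URL can be handled uniformly. A minimal sketch of that dispatch, using the hypothetical helper open_archive and only the httpio/zipfile calls already shown here:

import re
import zipfile

import httpio


def open_archive(uri):
    # remote URLs get a seekable HTTP-backed file, local paths a plain binary file
    if re.match(r"^https?://", uri):
        fp = httpio.open(uri, block_size=-1)
    else:
        fp = open(uri, "rb")
    # ZipFile (and IPA readers built on it) only need read()/seek()
    return zipfile.ZipFile(fp)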
Example #3
 def _open_remote_attachments(self):
     with httpio.open(self.url) as remote_file:
         with zipfile.ZipFile(remote_file) as zipf:
             name_list = zipf.namelist()
             # any pdf may be an attachment, we save all the references for the duration of document import
             pdf_names = (x for x in name_list if x.endswith('.pdf'))
             # iterate the generator rather than looping on its truthiness,
             # which never becomes False and ends in a StopIteration from next()
             for name in pdf_names:
                 yield zipf.open(name)
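A hypothetical caller (the importer instance and save_attachment helper are illustrative, not part of the source) would simply iterate the generator and close each member file after reading it:

for attachment in importer._open_remote_attachments():
    with attachment:
        save_attachment(attachment.read())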
Example #4
def read_spectra(url, user, pwd, targetid, expid, fiber, mask, ivar):
    kw = dict(auth=(user, pwd), verify=False)
    block_size = 2880 * 10  # caching block
    with httpio.open(url, block_size=block_size, **kw) as fp:
        if url in si.cache:
            fp._cache = si.cache[url]
        hdus = pyfits.open(fp)

        ftab = atpy.Table(hdus['FIBERMAP'].data)

        if expid is not None:
            xind = ftab['EXPID'] == expid
        else:
            xind = np.ones(len(ftab), dtype=bool)
        if targetid is not None:
            xids = np.nonzero((ftab['TARGETID'] == targetid) & xind)[0]
        else:
            xids = np.nonzero((ftab['FIBER'] == fiber) & xind)[0]
        if len(xids) == 0:
            print('no spectra')
            return []

        waves = {}
        for arm in 'BRZ':
            waves[arm] = hdus[arm + '_WAVELENGTH'].data

        fluxes = {}
        for arm in 'BRZ':
            fluxes[arm] = hdus[arm + '_FLUX'].section

        masks = {}
        if mask:
            for arm in 'BRZ':
                masks[arm] = hdus[arm + '_MASK'].section

        ivars = {}
        if ivar:
            for arm in 'BRZ':
                ivars[arm] = hdus[arm + '_IVAR'].section

        rets = []
        for xid in xids:
            ret = {}
            for arm in 'BRZ':
                ret[arm.lower() + '_wavelength'] = waves[arm]
                ret[arm.lower() + '_flux'] = fluxes[arm][xid, :]
            if mask:
                for arm in 'BRZ':
                    ret[arm.lower() + '_mask'] = masks[arm][xid, :]
            if ivar:
                for arm in 'BRZ':
                    ret[arm.lower() + '_ivar'] = ivars[arm][xid, :]

            rets.append(ret)
        si.cache[url] = copy.copy(fp._cache)
        return rets
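For reference, a call might look like this; the URL, credentials, and fiber number are placeholders rather than a real DESI endpoint:

# hypothetical usage of read_spectra; all values are placeholders
spectra = read_spectra(url='https://example.org/spectra-2-1234-20200101.fits',
                       user='username', pwd='password',
                       targetid=None, expid=None, fiber=1234,
                       mask=True, ivar=True)
for spec in spectra:
    print(spec['b_wavelength'].shape, spec['b_flux'].shape)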
Example #5
    def _transfer_new_file(self, filename: str) -> None:
        """Transfer a routeview file into the cloud bucket.

    Args:
      filename: string of the format "routeviews-rv2-20200720-1200.pfx2as.gz"
    """
        year = filename[15:19]
        month = filename[19:21]

        url = CAIDA_ROUTEVIEW_DIR_URL + year + "/" + month + "/" + filename

        output_blob = self.caida_bucket.blob(
            os.path.join(self.bucket_routeview_path, filename))

        with httpio.open(url) as output:
            output_blob.upload_from_file(output)
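The slice indices follow from the fixed filename layout, e.g.:

>>> f = "routeviews-rv2-20200720-1200.pfx2as.gz"
>>> f[15:19], f[19:21]
('2020', '07')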
Example #6
def read_models(url, user, pwd, targetid, fiber, expid=None):

    block_size = 2880 * 10  # caching block
    kw = dict(auth=(user, pwd), verify=False)

    with httpio.open(url, block_size=block_size, **kw) as fp:
        if url in si.cache:
            fp._cache = si.cache[url]
        hdus = pyfits.open(fp)
        ftab = atpy.Table(hdus['FIBERMAP'].data)

        if expid is not None:
            xind = ftab['EXPID'] == expid
        else:
            xind = np.ones(len(ftab), dtype=bool)
        if targetid is not None:
            xids = np.nonzero((ftab['TARGETID'] == targetid) & xind)[0]
        else:
            xids = np.nonzero((ftab['FIBER'] == fiber) & xind)[0]

        if len(xids) == 0:
            print('no spectra')
            return []

        waves = {}
        for arm in 'BRZ':
            waves[arm] = hdus[arm + '_WAVELENGTH'].data

        models = {}
        for arm in 'BRZ':
            models[arm] = hdus[arm + '_MODEL'].section

        rets = []

        for xid in xids:
            ret = {}
            for arm in 'BRZ':
                ret[arm.lower() + '_wavelength'] = waves[arm]
                ret[arm.lower() + '_model'] = models[arm][xid, :]
            rets.append(ret)
        si.cache[url] = copy.copy(fp._cache)
        return rets
Example #7
def _get_latest_generated_routeview_files() -> List[str]:
    """Get a list of recently created files CAIDA routeview files on their server.

  Returns:
    A list of filename strings
    ex ["routeviews-rv2-20200720-1200.pfx2as.gz",
        "routeviews-rv2-20200719-1200.pfx2as.gz"]
  """
    url = CAIDA_ROUTEVIEW_DIR_URL + CAIDA_CREATION_FILE
    output = io.TextIOWrapper(httpio.open(url), encoding="utf-8")

    files = []
    for line in output:
        if line[0] != "#":  # ignore comment lines
            # Line format:
            # 4492	1595262269	2020/07/routeviews-rv2-20200719-1200.pfx2as.gz
            # we only want the routeviews-rv2-20200719-1200.pfx2as.gz portion
            filename = pathlib.PurePosixPath(line.split()[2]).name
            files.append(filename)
    return files
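For the sample line quoted in the comment, the parsing step (with pathlib imported as above) works out to:

>>> line = "4492\t1595262269\t2020/07/routeviews-rv2-20200719-1200.pfx2as.gz"
>>> pathlib.PurePosixPath(line.split()[2]).name
'routeviews-rv2-20200719-1200.pfx2as.gz'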
Example #8
def download_manual_routeviews(bucket: storage.bucket.Bucket) -> None:
    first_date = datetime.date(2018, 7, 27)  # Date of earliest data
    last_date = datetime.date.today()
    datelist = [
        first_date + datetime.timedelta(days=x)
        for x in range(0, (last_date - first_date).days + 1)
    ]

    for date in datelist:
        print("checking date {}".format(date))
        year, month, day = date.year, date.month, date.day

        path = f"http://data.caida.org/datasets/routing/routeviews-prefix2as/{year}/{month:02}/"
        # possible times are 0000 to 2200 in intervals of 200
        times = [
            "0000", "0200", "0400", "0600", "0800", "1000", "1200", "1400",
            "1600", "1800", "2000", "2200"
        ]
        for time in times:
            try:
                filename = f"routeviews-rv2-{year}{month:02}{day:02}-{time}.pfx2as.gz"
                url = path + filename
                cloud_filepath = "caida/routeviews/" + filename

                # This call will fail for most urls,
                # since we don't know which timestamp is correct.
                # In that case we just move on to our next guess.
                f = httpio.open(url)

                print(
                    f"mirroring {url} to gs://{bucket.name}/{cloud_filepath}")

                blob = bucket.blob(cloud_filepath)
                blob.upload_from_file(f)
            except requests.exceptions.HTTPError as ex:
                if ex.response.status_code != 404:
                    raise ex
Example #9
#!/usr/bin/env python3
import httpio
import argparse

from zipfile import ZipFile
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Get tile metadata from pivnet.',
        epilog=
        "example: ./get_pivnet_metadata.py --url 'https://network.pivotal.io/api/v2/products/elastic-runtime/releases/160817/product_files/193873/download' --token 'xxx'"
    )
    parser.add_argument('--url', help='download api link', required=True)
    parser.add_argument('--token',
                        help='Pivnet Legacy API Token',
                        required=True)
    args = parser.parse_args()

    with httpio.open(args.url,
                     allow_redirects=True,
                     headers={"Authorization": "Token " + args.token}) as f:
        zf = ZipFile(f)
        contents = zf.read("metadata/metadata.yml")
        print(contents.decode('utf-8'))
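The same ranged-read approach can inspect any other entry of the remote archive without downloading the whole product file; a sketch reusing the url/token arguments above:

with httpio.open(args.url,
                 allow_redirects=True,
                 headers={"Authorization": "Token " + args.token}) as f:
    with ZipFile(f) as zf:
        # list every member and its uncompressed size
        for info in zf.infolist():
            print(info.filename, info.file_size)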
Example #10
def get_rvspec_models(tileid=None,
                      night=None,
                      fiber=None,
                      targetid=None,
                      expid=None,
                      coadd=False,
                      run='200507'):
    """
    Get RVSpecfit models
    
    Parameters
    ----------
    tileid: int
    night: int
    fiber: int
    targetid: int
    expid: int (optional)
    coadd: bool 
         If true read coadded spectra
    run: string
         The string identifying a software run

    Returns
    -------
    ret: list(dict)
        The list of dictionaries where each dictionary
        has keywords b_wavelength, r_wavelength, z_wavelength
        b_model etc
    """

    if coadd:
        prefix = 'rvmod_coadd'
    else:
        prefix = 'rvmod_spectra'
    spectrograph = fiber // 500
    url = f'https://data.desi.lbl.gov/desi/science/mws/redux/andes/{run}/rv_output/{tileid}/{night}/{prefix}-{spectrograph}-{tileid}-{night}.fits'
    user, pwd = get_desi_login_password()
    kw = dict(auth=(user, pwd), verify=False)
    with httpio.open(url, **kw) as fp:
        hdus = pyfits.open(fp)
        ftab = atpy.Table(hdus['FIBERMAP'].data)

        if expid is not None:
            xind = ftab['EXPID'] == expid
        else:
            xind = np.ones(len(ftab), dtype=bool)
        xids = np.nonzero((ftab['TARGETID'] == targetid) & xind)[0]

        if len(xids) == 0:
            print('no spectra')
            return []
        bwave = hdus['B_WAVELENGTH'].data
        rwave = hdus['R_WAVELENGTH'].data
        zwave = hdus['Z_WAVELENGTH'].data

        rets = []
        bdata = hdus['B_MODEL'].section
        rdata = hdus['R_MODEL'].section
        zdata = hdus['Z_MODEL'].section
        for xid in xids:
            bdat = bdata[xid, :]
            rdat = rdata[xid, :]
            zdat = zdata[xid, :]

            ret = dict(b_wavelength=bwave,
                       r_wavelength=rwave,
                       z_wavelength=zwave,
                       b_model=bdat,
                       r_model=rdat,
                       z_model=zdat)
            rets.append(ret)
        return rets
Example #11
def get_specs(tileid=None,
              night=None,
              fiber=None,
              targetid=None,
              expid=None,
              coadd=True,
              coadd_type='pernight',
              dataset='denali',
              mask=False,
              ivar=False):
    """
    Get DESI spectra
    
    Parameters
    ----------
    tileid: int
    night: int or string
         The night identifier (i.e. 20200220 or 'all' or 'deep' for coadds)
    fiber: int
    targetid: int (optional)
    expid: int (optional)
    coadd: bool
         If true read coadded spectra
    mask: bool
         If true return the masks as well
    ivar: bool
         If true return the inverse variances

    Returns
    -------
    ret: list(dict)
        The list of dictionaries for each observation
        where each dictionary
        has keywords b_wavelength, r_wavelength, z_wavelength
        b_flux, b_mask, b_ivar

    """
    if coadd:
        prefix = 'coadd'
    else:
        prefix = 'spectra'
    if fiber is None:
        raise Exception(
            'Fiber must be specified as it is needed to identify the ' +
            'spectrograph')
    spectrograph = fiber // 500
    if coadd_type == 'cumulative':
        night1 = f'thru{night}'
    else:
        night1 = night
    url = f'https://data.desi.lbl.gov/desi/spectro/redux/{dataset}/tiles/{coadd_type}/{tileid}/{night}/{prefix}-{spectrograph}-{tileid}-{night1}.fits'
    user, pwd = get_desi_login_password()
    kw = dict(auth=(user, pwd), verify=False)
    block_size = 2880 * 10  # caching block
    with httpio.open(url, block_size=block_size, **kw) as fp:
        if url in si.cache:
            fp._cache = si.cache[url]
        hdus = pyfits.open(fp)

        ftab = atpy.Table(hdus['FIBERMAP'].data)

        if expid is not None:
            xind = ftab['EXPID'] == expid
        else:
            xind = np.ones(len(ftab), dtype=bool)
        if targetid is not None:
            xids = np.nonzero((ftab['TARGETID'] == targetid) & xind)[0]
        else:
            xids = np.nonzero((ftab['FIBER'] == fiber) & xind)[0]
        if len(xids) == 0:
            print('no spectra')
            return []

        waves = {}
        for arm in 'BRZ':
            waves[arm] = hdus[arm + '_WAVELENGTH'].data

        fluxes = {}
        for arm in 'BRZ':
            fluxes[arm] = hdus[arm + '_FLUX'].section

        masks = {}
        if mask:
            for arm in 'BRZ':
                masks[arm] = hdus[arm + '_MASK'].section

        ivars = {}
        if ivar:
            for arm in 'BRZ':
                ivars[arm] = hdus[arm + '_IVAR'].section

        rets = []
        for xid in xids:
            ret = {}
            for arm in 'BRZ':
                ret[arm.lower() + '_wavelength'] = waves[arm]
                ret[arm.lower() + '_flux'] = fluxes[arm][xid, :]
            if mask:
                for arm in 'BRZ':
                    ret[arm.lower() + '_mask'] = masks[arm][xid, :]
            if ivar:
                for arm in 'BRZ':
                    ret[arm.lower() + '_ivar'] = ivars[arm][xid, :]

            rets.append(ret)
        si.cache[url] = copy.copy(fp._cache)
        return rets
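A possible call, with placeholder tile/night/fiber values rather than a real observation:

# hypothetical usage of get_specs; identifiers are placeholders
specs = get_specs(tileid=80605, night=20210110, fiber=1234,
                  coadd=True, coadd_type='pernight', dataset='denali',
                  mask=True, ivar=True)
for spec in specs:
    print(spec['b_wavelength'].shape, spec['b_flux'].shape)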
Example #12
def get_rvspec_models(tileid=None,
                      night=None,
                      fiber=None,
                      targetid=None,
                      expid=None,
                      coadd=True,
                      coadd_type='pernight',
                      run='210112',
                      dataset='denali'):
    """
    Get RVSpecfit models
    
    Parameters
    ----------
    tileid: int
    night: int
    fiber: int
    targetid: int
    expid: int (optional)
    coadd: bool
         If true read coadded spectra
    run: string
         The string identifying a software run
    dataset: the dataset fitted (i.e. andes/sv_daily)

    Returns
    -------
    ret: list(dict)
        The list of dictionaries where each dictionary
        has keywords b_wavelength, r_wavelength, z_wavelength
        b_model etc
    """

    if coadd:
        prefix = 'rvmod_coadd'
    else:
        prefix = 'rvmod_spectra'
    if fiber is None:
        raise Exception(
            'Fiber must be specified as it is needed to identify the ' +
            'spectrograph')
    spectrograph = fiber // 500
    if coadd_type == 'cumulative':
        night1 = f'thru{night}'
    else:
        night1 = night
    url = f'https://data.desi.lbl.gov/desi/science/mws/redux/{dataset}/rv_output/{run}/{coadd_type}/{tileid}/{night}/{prefix}-{spectrograph}-{tileid}-{night1}.fits'
    block_size = 2880 * 10  # caching block
    user, pwd = get_desi_login_password()
    kw = dict(auth=(user, pwd), verify=False)

    with httpio.open(url, block_size=block_size, **kw) as fp:
        if url in si.cache:
            fp._cache = si.cache[url]
        hdus = pyfits.open(fp)
        ftab = atpy.Table(hdus['FIBERMAP'].data)

        if expid is not None:
            xind = ftab['EXPID'] == expid
        else:
            xind = np.ones(len(ftab), dtype=bool)
        if targetid is not None:
            xids = np.nonzero((ftab['TARGETID'] == targetid) & xind)[0]
        else:
            xids = np.nonzero((ftab['FIBER'] == fiber) & xind)[0]

        if len(xids) == 0:
            print('no spectra')
            return []

        waves = {}
        for arm in 'BRZ':
            waves[arm] = hdus[arm + '_WAVELENGTH'].data

        models = {}
        for arm in 'BRZ':
            models[arm] = hdus[arm + '_MODEL'].section

        rets = []

        for xid in xids:
            ret = {}
            for arm in 'BRZ':
                ret[arm.lower() + '_wavelength'] = waves[arm]
                ret[arm.lower() + '_model'] = models[arm][xid, :]
            rets.append(ret)
        si.cache[url] = copy.copy(fp._cache)
        return rets
Example #13
def get_specs(tileid=None,
              night=None,
              fiber=None,
              targetid=None,
              expid=None,
              coadd=False,
              dataset='andes',
              mask=False,
              ivar=False):
    """
    Get DESI spectra 
    
    Parameters
    ----------
    tileid: int
    night: int
    fiber: int
    targetid: int
    expid: int (optional)
    coadd: bool 
         If true read coadded spectra

    Returns
    -------
    ret: list(dict)
        The list of dictionaries where each dictionary
        has keywords b_wavelength, r_wavelength, z_wavelength
        b_flux etc

    """
    if coadd:
        prefix = 'coadd'
    else:
        prefix = 'spectra'
    spectrograph = fiber // 500
    url = f'https://data.desi.lbl.gov/desi/spectro/redux/{dataset}/tiles/{tileid}/{night}/{prefix}-{spectrograph}-{tileid}-{night}.fits'
    user, pwd = get_desi_login_password()
    kw = dict(auth=(user, pwd), verify=False)
    block_size = 2880 * 10  # caching block
    with httpio.open(url, block_size=block_size, **kw) as fp:
        if url in si.cache:
            fp._cache = si.cache[url]
        hdus = pyfits.open(fp)

        ftab = atpy.Table(hdus['FIBERMAP'].data)

        if expid is not None:
            xind = ftab['EXPID'] == expid
        else:
            xind = np.ones(len(ftab), dtype=bool)
        if targetid is not None:
            xids = np.nonzero((ftab['TARGETID'] == targetid) & xind)[0]
        else:
            xids = np.nonzero((ftab['FIBER'] == fiber) & xind)[0]
        if len(xids) == 0:
            print('no spectra')
            return []

        waves = {}
        for arm in 'BRZ':
            waves[arm] = hdus[arm + '_WAVELENGTH'].data

        fluxes = {}
        for arm in 'BRZ':
            fluxes[arm] = hdus[arm + '_FLUX'].section

        masks = {}
        if mask:
            for arm in 'BRZ':
                masks[arm] = hdus[arm + '_MASK'].section

        ivars = {}
        if ivar:
            for arm in 'BRZ':
                ivars[arm] = hdus[arm + '_IVAR'].section

        rets = []
        for xid in xids:
            ret = {}
            for arm in 'BRZ':
                ret[arm.lower() + '_wavelength'] = waves[arm]
                ret[arm.lower() + '_flux'] = fluxes[arm][xid, :]
            if mask:
                for arm in 'BRZ':
                    ret[arm.lower() + '_mask'] = masks[arm][xid, :]
            if ivar:
                for arm in 'BRZ':
                    ret[arm.lower() + '_ivar'] = ivars[arm][xid, :]

            rets.append(ret)
        si.cache[url] = copy.copy(fp._cache)
        return rets
Example #14
 def _open_remote_xml_file(self):
     with httpio.open(self.url) as remote_file:
         with zipfile.ZipFile(remote_file) as zipf:
             with self._open_xml_file_from_zip(zipf) as xml_file:
                 yield xml_file
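Since this generator yields exactly once, it is presumably wrapped with contextlib.contextmanager in the original class; under that assumption a caller (importer is an illustrative instance) would use it as:

from xml.etree import ElementTree

with importer._open_remote_xml_file() as xml_file:
    tree = ElementTree.parse(xml_file)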
Example #16
def main():
    parser = argparse.ArgumentParser()
    # formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument("-s", "--serial", help="device serial number")
    parser.add_argument("-V",
                        "--server-version",
                        action="store_true",
                        help="show adb server version")
    parser.add_argument("-l",
                        "--list",
                        action="store_true",
                        help="list devices")
    parser.add_argument("-i",
                        "--install",
                        help="install from local apk or url")
    parser.add_argument(
        "--install-confirm",
        action="store_true",
        help="auto confirm when install (based on uiautomator2)")
    parser.add_argument("-u", "--uninstall", help="uninstall apk")
    parser.add_argument("-L",
                        "--launch",
                        action="store_true",
                        help="launch after install")
    parser.add_argument("--qrcode", help="show qrcode of the specified file")
    parser.add_argument("--parse",
                        type=str,
                        help="parse package info from local file or url")
    parser.add_argument("--clear",
                        action="store_true",
                        help="clear all data when uninstall")
    parser.add_argument("--list-packages",
                        action="store_true",
                        help="list packages installed")
    parser.add_argument("--current",
                        action="store_true",
                        help="show current package info")
    parser.add_argument("-p",
                        "--package",
                        help="show package info in json format")
    parser.add_argument("--grep", help="filter matched package names")
    parser.add_argument("--connect", type=str, help="connect remote device")
    parser.add_argument("--shell",
                        action="store_true",
                        help="run shell command")
    parser.add_argument("--minicap",
                        action="store_true",
                        help="install minicap and minitouch to device")
    parser.add_argument("--screenshot", type=str, help="take screenshot")
    parser.add_argument("-b", "--browser", help="open browser in device")
    parser.add_argument(
        "--push",
        help=
        "push local to remote, arg is colon separated, eg some.txt:/sdcard/s.txt"
    )
    parser.add_argument(
        "--pull",
        help="pull remote file to local, arg is the remote path, eg /sdcard/some.txt"
    )
    parser.add_argument("--dump-info",
                        action="store_true",
                        help="dump info for developer")
    parser.add_argument("--track",
                        action="store_true",
                        help="trace device status")
    parser.add_argument("args", nargs="*", help="arguments")

    args = parser.parse_args()

    if args.connect:
        adbclient.connect(args.connect)
        return

    if args.server_version:
        print("ADB Server version: {}".format(adbclient.server_version()))
        return

    if args.list:
        rows = []
        for d in adbclient.device_list():
            rows.append([d.serial, d.shell("getprop ro.product.model")])
        lens = []
        for col in zip(*rows):
            lens.append(max([len(v) for v in col]))
        format = "  ".join(["{:<" + str(l) + "}" for l in lens])
        for row in rows:
            print(format.format(*row))
        return

    if args.qrcode:
        from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer

        filename = args.qrcode
        port = 8000
        url = "http://%s:%d/%s" % (current_ip(), port, filename)
        print("File URL:", url)
        try:
            import qrcode
            qr = qrcode.QRCode(border=2)
            qr.add_data(url)
            qr.print_ascii(tty=True)
        except ImportError:
            print(
                "In order to show QRCode, you need to install it with: pip3 install qrcode"
            )

        httpd = ThreadingHTTPServer(('', port), SimpleHTTPRequestHandler)
        httpd.serve_forever()
        return

    if args.dump_info:
        print("==== ADB Info ====")
        print("Path:", adbutils.adb_path())
        print("Server version:", adbclient.server_version())
        print("")
        print(">> List of devices attached")
        for d in adbclient.device_list():
            print("-", d.serial, d.prop.name, d.prop.model)
        return

    if args.track:
        for event in adbclient.track_devices():
            asctime = datetime.datetime.now().strftime("%H:%M:%S.%f")
            print("{} {} -> {}".format(asctime[:-3], event.serial,
                                       event.status))
        return

    elif args.parse:
        uri = args.parse

        fp = None
        if re.match(r"^https?://", uri):
            try:
                import httpio
            except ImportError:
                retcode = subprocess.call(
                    [sys.executable, '-m', 'pip', 'install', '-U', 'httpio'])
                assert retcode == 0
                import httpio
            fp = httpio.open(uri, block_size=-1)
        else:
            assert os.path.isfile(uri)
            fp = open(uri, 'rb')
        try:
            ar = APKReader(fp)
            ar.dump_info()
        finally:
            fp.close()
        return

    ## Device operation
    d = adbclient.device(args.serial)

    if args.shell:
        output = d.shell(args.args)
        print(output)
        return

    if args.install:

        def _callback(event_name: str, ud):
            name = "_INSTALL_"
            if event_name == "BEFORE_INSTALL":
                print("== Enable popup window watcher")
                ud.press("home")
                ud.watcher(name).when("允许").click()
                ud.watcher(name).when("继续安装").click()
                ud.watcher(name).when("安装").click()
                ud.watcher.start()
            elif event_name == "FINALLY":
                print("== Stop popup window watcher")
                ud.watcher.remove(name)
                ud.watcher.stop()

        if args.install_confirm:
            import uiautomator2 as u2
            ud = u2.connect(args.serial)
            _callback = functools.partial(_callback, ud=ud)
        else:
            _callback = None

        d.install(args.install, uninstall=True, callback=_callback)

    elif args.uninstall:
        d.uninstall(args.uninstall)

    elif args.list_packages:
        pattern = re.compile(args.grep or ".*")
        for p in d.list_packages():
            if pattern.search(p):
                print(p)

    elif args.screenshot:
        if args.minicap:

            def adb_shell(cmd: list):
                print("Run:", " ".join(["adb", "shell"] + cmd))
                return d.shell(cmd).strip()

            json_output = adb_shell([
                "LD_LIBRARY_PATH=/data/local/tmp", "/data/local/tmp/minicap",
                "-i", "2&>/dev/null"
            ])
            if not json_output.startswith("{"):
                raise RuntimeError("Invalid json format", json_output)
            data = json.loads(json_output)

            w, h, r = data["width"], data["height"], data["rotation"]
            d.shell([
                "LD_LIBRARY_PATH=/data/local/tmp", "/data/local/tmp/minicap",
                "-P", "{0}x{1}@{0}x{1}/{2}".format(w, h, r), "-s",
                ">/sdcard/minicap.jpg"
            ])
            d.sync.pull("/sdcard/minicap.jpg", args.screenshot)
        else:
            remote_tmp_path = "/data/local/tmp/screenshot.png"
            d.shell(["rm", remote_tmp_path])
            d.shell(["screencap", "-p", remote_tmp_path])
            d.sync.pull(remote_tmp_path, args.screenshot)

    elif args.minicap:  # without args.screenshot
        _setup_minicap(d)

    elif args.push:
        local, remote = args.push.split(":", 1)
        length = os.stat(local).st_size
        with open(local, "rb") as fd:
            r = ReadProgress(fd, length)
            d.sync.push(r, remote, filesize=length)

    elif args.pull:
        remote_path = args.pull
        target_path = os.path.basename(remote_path)
        finfo = d.sync.stat(args.pull)

        if finfo.mode == 0 and finfo.size == 0:
            sys.exit(f"remote file '{remote_path}' does not exist")

        bytes_so_far = 0
        for chunk in d.sync.iter_content(remote_path):
            bytes_so_far += len(chunk)
            percent = bytes_so_far / finfo.size * 100 if finfo.size != 0 else 100.0
            print(
                f"\rDownload to {target_path} ... [{bytes_so_far} / {finfo.size}] %.1f %%"
                % percent,
                end="",
                flush=True)
        print(f"{remote_path} pulled to {target_path}")

    elif args.browser:
        d.open_browser(args.browser)

    elif args.current:
        package_name = d.current_app()['package']
        info = d.package_info(package_name)
        print(json.dumps(info, indent=4, default=str))

    elif args.package:
        info = d.package_info(args.package)
        print(json.dumps(info, indent=4, default=str))