def repeat_task(self, t: str):
    today = arrow.now().format("YYYY-MM-DD")
    if today not in self._tasks[t]:
        self._tasks[t].append(today)
        cloud.encode_remote(self._tasks, self._file)
        cf.info(f"Task [{t}] was repeated.")
def add(self, keyword: str, url: str) -> None:
    self.reload()
    if keyword in self.json:
        cf.warning(
            f'{keyword} with URL {self.json[keyword]} already added.')
    else:
        self.json[keyword] = url
        self._update_remote()
        cf.info(f'{keyword} with URL {self.json[keyword]} added SUCCESS.')
def run_task(msg: dict, func):
    key = io.read(SALT, '')
    if certify_token(key, msg.get('token')):
        cf.info('certify_token SUCCESS')
        content = msg.get('text')
        if content is not None:
            func(content)
        return 'SUCCESS'
    else:
        return 'FAILED'
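# generate_token / certify_token are defined elsewhere in the package and
# are not shown in this section. The sketch below is only an assumption
# about how such an HMAC token pair typically works (expiry timestamp +
# HMAC-SHA1 of that timestamp under the shared salt), not the package's
# actual implementation.
import hashlib
import hmac
import time


def generate_token(key: str, expire: int = 3600) -> str:
    ts = str(int(time.time()) + expire)      # 10-digit expiry timestamp
    sig = hmac.new(key.encode(), ts.encode(), hashlib.sha1).hexdigest()
    return ts + sig


def certify_token(key: str, token: str) -> bool:
    if not token or len(token) <= 10:
        return False
    ts, sig = token[:10], token[10:]
    if not ts.isdigit() or int(ts) < time.time():    # malformed or expired
        return False
    expected = hmac.new(key.encode(), ts.encode(), hashlib.sha1).hexdigest()
    return hmac.compare_digest(expected, sig)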
def add_task(self, t: str):
    today = arrow.now().format("YYYY-MM-DD")
    if t not in self._tasks:
        self._tasks[t] = []
    if today not in self._tasks[t]:
        self._tasks[t].append(today)
        cloud.encode_remote(self._tasks, self._file)
        cf.info(f"New task [{t}] was created.")
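# Shape of self._tasks after a few calls (values illustrative): each task
# maps to the list of dates on which it was done, and the whole dict is
# mirrored to remote storage via cloud.encode_remote.
# {'read-paper': ['2021-05-01', '2021-05-03'],
#  'gym':        ['2021-05-02']}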
def tweet():
    msg = request.get_json()
    key = io.read(SALT, '')
    if not certify_token(key, msg.get('token')):
        return 'FAILED'
    text = cf.utils.decipher(key, msg.get('text'))
    media = [f'/tmp/{e}' for e in msg.get('media')]
    cf.info(f'Input tweet: {text} / ' + ''.join(media))
    Twitter().post([text] + media)
    return 'SUCCESS'
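# Expected request body for the handler above (values illustrative):
# {
#   'token': '<hmac token accepted by certify_token>',
#   'text':  '<tweet text enciphered with the shared salt>',
#   'media': ['photo.jpg']   # each name is resolved to /tmp/<name>
# }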
def _sync():
    cli = Bucket()
    if len(sys.argv) > 1:
        cf.utils.shell('zip -r9 -P syncsync63 -FSr /tmp/sync.zip {}'.format(
            ' '.join(sys.argv[1:])))
        cf.info('Files zipped.')
        cli.upload('/tmp/sync.zip')
    else:
        cli.download('sync.zip', '/tmp/sync.zip')
        cf.info('zip file downloaded.')
        cf.utils.shell('unzip -P syncsync63 -o /tmp/sync.zip')
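# Usage sketch, assuming _sync is wired to a CLI entry point (the command
# name below is hypothetical):
#   $ dofast sync notes.md src/   # zip the arguments and upload sync.zip
#   $ dofast sync                 # download sync.zip and unzip it here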
def publish_message(self, message: dict):
    '''message demo:
    {'text': 'Hello, this is John from Ohio.', 'token': 'hmac token'}
    '''
    msg = json.loads(message.body)
    key = io.read(SALT, '')
    if certify_token(key, msg.get('token')):
        cf.info('certify_token SUCCESS')
        if msg.get('text') is not None:
            bot_messalert(msg.get('text'))
    else:
        cf.warning('certify_token FAILED')
def remove(self, keyword: str = '', url: str = '') -> None:
    '''Delete an entry by either keyword or URL.'''
    self.reload()
    _tuple = ('', '')
    if keyword and keyword in self.json:
        _tuple = (keyword, self.json[keyword])
        del self.json[keyword]
    elif url:
        for _key in list(self.json):
            if self.json[_key] == url:
                _tuple = (_key, self.json[_key])
                del self.json[_key]
    self._update_remote()
    cf.info(f'{_tuple[0]} / {_tuple[1]} remove SUCCESS')
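# Illustrative use of the keyword -> URL store above (the instance name
# is hypothetical):
#   store.add('gh', 'https://github.com')    # add and push to remote
#   store.remove(keyword='gh')               # delete by keyword
#   store.remove(url='https://github.com')   # or delete by value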
def create_random_file(size: int = 100):
    # Default 100 MB
    _file = 'cc.txt'
    open(_file, 'w').write("")
    print(f">> Create {_file} of size {size} MB")
    logfile = '/tmp/ddfile.txt'
    if io.exists(logfile):
        for f in io.read(logfile):
            cf.info('removing previous file', f)
            io.rm(f)
    with open(_file, 'ab') as fout:
        cc_dir = os.path.join(io.pwd().rstrip(), _file)
        io.write([cc_dir], logfile)
        for _ in tqdm(range(size)):
            fout.write(os.urandom(1024 * 1024))
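# e.g. create_random_file(10) truncates ./cc.txt, then appends ten 1 MB
# chunks of os.urandom data; /tmp/ddfile.txt records the file's absolute
# path so the next invocation can delete the previous file first.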
def reminder(self) -> list:
    '''Return list of tasks to be done.'''
    tasks = sorted(self._tasks.items(), key=operator.itemgetter(0))
    todo = []
    for t, ts in tasks:
        fibs = [2, 3]
        while len(fibs) < len(ts):
            fibs.append(fibs[-1] + fibs[-2])
        if arrow.now().format("YYYY-MM-DD") != ts[-1]:
            days_elapsed = (arrow.now() - arrow.get(ts[-1])).days
            if days_elapsed > fibs[-1]:
                msg = f"[{t}] X {len(ts)} " + ' / '.join(ts)
                cf.info(msg)
                todo.append(msg)
    return todo
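# Minimal standalone sketch of the spacing rule reminder() applies: a
# task completed n times resurfaces only once more than fib_gap(n) days
# have passed since its latest completion (the function name here is
# illustrative, not part of the package).
def fib_gap(n_completions: int) -> int:
    gaps = [2, 3]
    while len(gaps) < n_completions:
        gaps.append(gaps[-1] + gaps[-2])
    return gaps[-1]


assert [fib_gap(n) for n in range(1, 6)] == [3, 3, 5, 8, 13]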
def surf():
    cf.net.post('http://127.0.0.1:4151/pub?topic=web&channel=surf',
                data={'data': 42})
    s = requests.Session()
    if not ds.DOMAINS:
        ds.DOMAINS = io.read('/tmp/cnlist.txt')
    domain = random.sample(ds.DOMAINS, 1)[0]
    try:
        url = domain if domain.startswith('http') else 'http://' + domain
        if random.randint(1, 100) > 50:
            url = url.replace('https', 'http')
        cf.info('visiting ' + url)
        r = s.get(url, headers=fake_headers(), timeout=1)
        soup = BeautifulSoup(r.text, 'html.parser')
        if url.endswith(('png', 'jpg', 'txt', 'json', 'jpeg', 'mp3', 'mp4',
                         'wav', 'csv', 'pdf', 'mobi')):
            cf.info('Downloading {}'.format(url))
            download(url, name='/tmp/websurf.png')
        io.write(r.text, '/tmp/tmp')
        for link in soup.find_all('a'):
            _url = link.get('href')
            if _url and _url.startswith('http'):
                ds.DOMAINS.append(_url)
        # refresh urls
        ds.DOMAINS = list(set(ds.DOMAINS))
        ds.HEADERS.append(r.headers)
    except Exception as e:
        if domain in ds.DOMAINS:
            ds.DOMAINS.remove(domain)
        cf.error(str(e))
    finally:
        time.sleep(random.randint(1, 3))
        ds.DOMAINS = sorted(ds.DOMAINS, key=lambda e: len(e), reverse=True)
        ds.HEADERS = ds.HEADERS[:10000]
def nsq_sync():
    cli = Bucket()
    if len(sys.argv) > 1:
        cf.utils.shell('zip -r9 -P syncsync63 -FSr /tmp/sync.zip {}'.format(
            ' '.join(sys.argv[1:])))
        cf.info('Files zipped.')
        cli.upload('/tmp/sync.zip')
    token = generate_token(cf.file.reads(SALT))
    _uuid = cf.utils.uuid()
    jsn.write({'uuid': _uuid}, '/tmp/syncfile.json')
    js = {
        'token': token,
        'topic': 'file',
        'channel': 'sync',
        'uuid': _uuid,
        'data': {
            'uuid': _uuid,
            'filename': 'sync.zip'
        }
    }
    res = cf.net.post('http://a.ddot.cc:6363/nsq', json=js)
    cf.info('NSQ sync', res.text)
def publish_message(self, message: dict):
    msg = json.loads(message.body)
    cf.info(msg)
    _uuid = msg['data']['uuid']
    if io.exists('/tmp/syncfile.json') and jsn.read(
            '/tmp/syncfile.json')['uuid'] == _uuid:
        return
    filename = msg['data']['filename']
    from dofast.oss import Bucket
    cf.info('Downloading:', filename)
    Bucket().download(filename, f'/tmp/{filename}')
    cf.info('Download completed:', filename)
    cf.utils.shell('unzip -P syncsync63 -d . -o /tmp/sync.zip')
    cf.info('File(s) unzipped.')
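# Note: nsq_sync() writes the uuid of the message it publishes to
# /tmp/syncfile.json, so the guard above lets a host silently skip sync
# messages that originated from itself.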
def publish_message(self, message: dict):
    cf.info(json.loads(message.body))
    cf.info(str(message), 'SUCCESS')
    return True
import json
import os
import sys
import traceback
from abc import ABCMeta, abstractmethod
from pathlib import Path
from threading import Thread

import codefast as cf
import nsq

cf.logger.level = 'info'
cf.info('start')


class Config:
    config_file = os.path.join(Path.home(), '.config/nsq.json')
    default_tcp = 'localhost:4150'
    TCP = jsn.read(config_file).get(
        'TCP', default_tcp) if io.exists(config_file) else default_tcp


class ExcThread(Thread):
    def __init__(self, target, args, name=''):
        super(ExcThread, self).__init__(target=target, args=args, name=name)
        self.func = target
        self.args = args
        self.name = name
        self.exitcode = 0
        self.exception = None
        self.exc_traceback = ''
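    # The run() override is not shown in this excerpt. The sketch below is
    # an assumption consistent with the attributes initialised above
    # (exitcode, exception, exc_traceback), not the package's actual code.
    def run(self):
        try:
            self.func(*self.args)
        except Exception as e:
            self.exitcode = 1
            self.exception = e
            self.exc_traceback = traceback.format_exc()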
import random
import sys
import time

import codefast as cf
import requests
from bs4 import BeautifulSoup
from faker import Faker

from dofast.utils import download

from .consumer import Consumer

cf.logger.level = 'info'
cf.info('Go.')


class ds:
    DOMAINS = io.read('/tmp/cnlist.txt')
    HEADERS = io.read('/tmp/headers.txt')


def fake_headers():
    h = jsn.read(random.sample(ds.HEADERS, 1)[0])
    h['User-Agent'] = Faker().user_agent()
    res = dict(
        (k, h[k]) for k in list(h) if k not in ('Date', 'Vary', 'Server'))
    return res