Exemplo n.º 1
0
def start(web_dir):
	"""Initialize and launch the WebUI server via eel.

	Returns True when the server was started, False when disabled by settings.
	"""
	global _web_dir, started, _session
	_web_dir = os.path.abspath(web_dir)
	_session = sql.session()
	if not settings.get('interface.start_server'):
		print('WebUI is disabled by settings.')
		return False
	browser = settings.get('interface.browser').lower().strip()
	browser = None if (browser == 'off') else browser
	options = {
		# FIX: `browser` is None when configured 'off'; the original crashed
		# here with TypeError on `'-app' in None` / `None.replace(...)`.
		'app_mode': bool(browser) and '-app' in browser,
		'mode': browser.replace('-app', '') if browser else None,
		'host': settings.get('interface.host'),
		'port': settings.get('interface.port'),
		'block': False,
		'close_callback': _websocket_close,
		'all_interfaces': False,
		# 'app': btl.default_app()
	}

	eel.init(web_dir)
	eel.start('index.html', **options)
	print('Started WebUI!')
	if browser:
		print('Awaiting connection from browser...')
	else:
		print('Browser auto-opening is disabled! Please open a browser to http://%s:%s/index.html !' %
			  (options['host'], options['port']))
	started = True
	return True
Exemplo n.º 2
0
    def load(self):
        """ Threaded loading of elements.

        Re-initializes settings/SQL inside this thread, re-submits any URLs
        left unfinished by a previous run, scans sources for new Posts, then
        blocks until every submitted URL has been ACK'd by the Downloaders.
        """
        settings.from_json(self.settings)
        sql.init_from_settings()
        self._session = sql.session()
        t_start = datetime.now()  #vy
        print("Started loading.")  #vy
        self.progress.set_scanning(True)

        retry_failed = settings.get('processing.retry_failed')

        # Query for all unhandled URLs, and submit them before scanning for new Posts.
        # NOTE(review): this filter mixes Python `and` with SQLAlchemy column
        # expressions, so the right-hand branch is partly evaluated in Python,
        # not in SQL. Verify the generated WHERE clause matches the intent
        # (cf. sql.and_/sql.or_) — the sibling loader uses `== True` instead.
        unfinished = self._session\
         .query(sql.URL)\
         .filter((sql.URL.processed == False) | \
          (retry_failed and sql.URL.failed and \
           sql.not_(sql.URL.failure_reason.contains('404'))))\
         .all()
        print("Loading %s unfinished urls" % len(unfinished))
        self._push_url_list(unfinished)

        self._scan_sources()

        self.progress.set_scanning(False)
        # Wait for any remaining ACKS to come in, before closing the writing pipe.
        # ...Until the Downloaders have confirmed completion of everything, more album URLS may come in.
        while len(self._open_ack) > 0 and not self._stop_event.is_set():
            self._handle_acks(timeout=1.0, clear=True)
        print("Finished loading.")  #vy
        print("Elapsed time: %s" % str(datetime.now() - t_start))  #vy
        sql.close()
Exemplo n.º 3
0
    def load(self):
        """ Threaded loading of elements.

        Re-submits previously-unfinished URLs, scans sources for new Posts,
        then drains outstanding ACKs before closing the SQL connection.
        """
        settings.from_json(self.settings)
        sql.init_from_settings()
        self._session = sql.session()

        self.progress.set_scanning(True)

        retry_failed = settings.get('processing.retry_failed')

        # Re-submit every URL never handled (or failed, when retrying is on),
        # before scanning for new Posts.
        pending = self._session.query(sql.URL).filter(
            (sql.URL.processed == False) | (retry_failed and sql.URL.failed == True)
        ).all()
        self._push_url_list(pending)

        self._scan_sources()

        self.progress.set_scanning(False)
        # Keep draining ACKs: until the Downloaders confirm completion,
        # more album URLs may still arrive through the pipe.
        while self._open_ack and not self._stop_event.is_set():
            self._handle_acks(timeout=0.5)
        print("Finished loading.")
        sql.close()
Exemplo n.º 4
0
def start(web_dir, file_dir, rmd_version):
	"""Launch the WebUI server through eel.

	Returns True on startup, or False when the server is disabled by settings.
	"""
	global _file_dir, _web_dir, _rmd_version, started, _session
	_file_dir = os.path.abspath(file_dir)
	_web_dir = os.path.abspath(web_dir)
	_rmd_version = rmd_version
	_session = sql.session()
	if not settings.get('interface.start_server'):
		print('WebUI is disabled by settings.')
		return False
	browser = settings.get('interface.browser').lower().strip()
	if browser == 'off':
		browser = None
	host = settings.get('interface.host')
	port = settings.get('interface.port')
	options = {
		'mode': browser,
		'host': host,
		'port': port,
		'chromeFlags': []
	}

	eel.init(web_dir)
	eel.start('index.html', options=options, block=False, callback=_websocket_close)
	# interface.port
	print('Started WebUI!')
	if browser:
		print('Awaiting connection from browser...')
	else:
		print('Browser auto-opening is disabled! Please open a browser to http://%s:%s/index.html !' %
			  (options['host'], options['port']))
	started = True
	return True
 def test_existing_file_name(self):
     """ Generating an incremented file name should work """
     post = self.sess.query(sql.Post) \
         .filter(sql.Post.reddit_id == 't3_ahal9v').first()
     name = ng.choose_file_name(post.urls[0], post, sql.session(), album_size=1)
     # A duplicate post must get a ' - 2' suffix appended.
     self.assertTrue(name.endswith(' - 2'),
                     msg='Failed to increment duplicate post!')
 def test_choose_file_name(self):
     """ Generating a new file name should work """
     post = self.sess.query(sql.Post).filter(sql.Post.title == 'test').first()
     name = ng.choose_file_name(post.urls[0], post, sql.session(), album_size=1)
     self.assertEqual('aww/test - (testuser)', name,
                      msg='Failed to convert basic Test post!')
 def setUp(self):
     """ Reload the modules under test and open a fresh SQL session. """
     for module in (settings, sql, ng):
         importlib.reload(module)
     settings.load(self.settings_file)
     sql.init_from_settings()
     self.sess = sql.session()
 def test_pattern_fail_load(self):
     """ Invalid patterns should fail """
     # This pattern has an unterminated '[author' tag, which must raise.
     settings.put('output.file_name_pattern',
                  '[type]-[id]-[title]-[author-[subreddit]-[source_alias]')
     post = self.sess.query(sql.Post).filter(sql.Post.title == 'test').first()
     with self.assertRaises(Exception, msg='Failed to catch broken pattern!'):
         ng.choose_file_name(post.urls[0], post, sql.session(), album_size=1)
    def test_album_filename(self):
        """ Generating new & incremented album names should work """
        # A fresh album gets a zero-padded index inside a new folder.
        first = self.sess.query(sql.Post).join(sql.URL) \
            .filter(sql.Post.reddit_id == 't3_98crc8').first()
        name = ng.choose_file_name(first.urls[0], first, sql.session(),
                                   album_size=1000)
        self.assertEqual('aww/album - (testuser2)/0001', name,
                         msg='Failed to generate new Album foldername!')

        # A second album with the same title gets its folder incremented.
        second = self.sess.query(sql.Post).join(sql.URL) \
            .filter(sql.Post.reddit_id == 't3_awyf90').first()
        name = ng.choose_file_name(second.urls[0], second, sql.session(),
                                   album_size=1)
        self.assertEqual('aww/album - (testuser2) - 2/1', name,
                         msg='Failed to create separate album folder!')
Exemplo n.º 10
0
 def setUp(self):
     """ Run the full download once; later tests reuse the shared session. """
     global download_ran, session
     if download_ran:
         return
     download_ran = True
     settings.load(self.settings_file)
     tui = TerminalUI()
     tui.display()
     self.db_path = join(settings.get('output.base_dir'), 'manifest.sqlite')
     sql.init_from_settings()
     session = sql.session()
Exemplo n.º 11
0
	def setUp(self):
		""" Perform the one-time download pass and record the manifest path. """
		global download_ran, session
		if download_ran:
			return
		download_ran = True
		settings.load(self.settings_file)
		tui = TerminalUI()
		tui.display()
		# The manifest lives in its own configured directory, separate from
		# the download output directory.
		self.db_path = join(settings.get('output.manifest_for_sqlite_dir'), 'manifest.sqlite')
		sql.init_from_settings()
		session = sql.session()
def make_migration():
    """Interactively generate an Alembic migration revision.

    Prompts the user for a revision message; when none is given, the
    migration is skipped. The SQL session is always closed on exit —
    the original leaked it on every path, including the early return.
    """
    conn = sql.session()
    try:
        alembic_cfg, script_, context = sql.get_alembic_ctx(conn)
        message = console.string('Enter a message for the migration')
        if not message:
            print('Skipping migration.')
            return
        res = command.revision(message=message,
                               autogenerate=True,
                               config=alembic_cfg)
        print('Generated Migration:', res)
        print('Finished.')
    finally:
        conn.close()
 def test_pattern_loader(self):
     """ All pattern tags should work """
     pattern = ('[type]-[reddit_id]-[title]-[author]-[subreddit]'
                '-[source_alias]-[created_utc]-[created_date]-[created_time]')
     settings.put('output.file_name_pattern', pattern)
     post = self.sess.query(sql.Post).filter(sql.Post.title == 'test').first()
     name = ng.choose_file_name(post.urls[0], post, sql.session(), album_size=1)
     self.assertRegex(
         name,
         r'Submission-t3_b1rycu-test-testuser-aww-newsource-1552739416-2019-..-..-..\...\...',
         msg='Failed to convert basic Test post!')
 def setUp(self):
     """ One-time download run, with settings/sql reloaded for clean state. """
     global download_ran, session
     if download_ran:
         return
     download_ran = True
     importlib.reload(settings)
     importlib.reload(sql)
     settings.load(self.settings_file)
     tui = TerminalUI()
     tui.display()
     sql.init_from_settings()
     self.db_path = sql.get_file_location()
     session = sql.session()
	def setUp(self):
		""" Start the WebUI once in a daemon thread and wait for it to come up. """
		global download_ran, session, thread
		if not download_ran:
			download_ran = True
			# FIX: load settings *before* reading them — the original computed
			# db_path/url from settings.get() prior to settings.load(), so it
			# read pre-load values.
			settings.load(self.settings_file)
			settings.put('interface.start_server', True)
			self.wui = WebUI('test_version')
			self.db_path = join(settings.get('output.base_dir'), 'manifest.sqlite')
			self.url = 'http://%s:%s/index.html#' % (settings.get('interface.host'), settings.get('interface.port'))
			sql.init_from_settings()
			session = sql.session()
			thread = Thread(target=self.wui.display)
			thread.daemon = True  # setDaemon() is deprecated since Python 3.10
			thread.start()
			self.assertTrue(self.wui.waitFor(10), msg='WebUI Failed to start!')
Exemplo n.º 16
0
    def test_full_encode(self):
        """ The entire nested relation should serialize """
        post = sql.session().query(sql.Post).join(sql.URL).join(sql.File).first()
        self.assertTrue(post, msg='Failed to find a test Post.')
        # URL.file is lazy-loaded, so touch each one explicitly before encoding.
        for url in post.urls:
            self.assertTrue(url.file, msg="URL is missing a file! %s" % url)
        encoded = sql.encode_safe(post)

        self.assertTrue(encoded,
                        msg='Failed to properly encode full stack Post into Object!')
        self.assertGreater(len(encoded['urls']), 0,
                           msg='Lost Post URLs in encode!')
        for url in encoded['urls']:
            self.assertIn('file', url, msg='Lost file in URL encode! %s' % url)
    def run(self):
        """ Threaded loading of elements.

        Repeats deduplication passes until the stop event fires, then runs
        one last pass to catch files finished during shutdown.
        """
        settings.from_json(self._settings)
        sql.init_from_settings()
        self._session = sql.session()
        self.progress.clear(status="Starting up...")
        self.progress.set_running(True)

        while not self._stop_event.is_set():
            self._dedupe()
            self.progress.set_status("Waiting for new files...")
            self._stop_event.wait(2)
        # One final pass after downloading stops.
        self._dedupe()

        self.progress.set_running(False)
        sql.close()
        self.progress.clear("Finished.")
Exemplo n.º 18
0
 def add_test(inf):
     """ Push every known URL id into the queue, then count ACKs for up to 30s. """
     sess = sql.session()
     urls = sess.query(sql.URL).all()
     started = time.time()
     outstanding = []
     for url in urls:
         in_queue.put_nowait(url.id)
         inf['sent'] += 1
         outstanding.append(url.id)
     # Drain ACKs until everything is confirmed or the 30s budget runs out.
     while outstanding and time.time() - started < 30:
         try:
             packet = ack_queue.get(block=True, timeout=.5)
             inf['ack'] += 1
             outstanding.remove(packet.url_id)
         except queue.Empty:
             pass
     sess.close()
     stop_event.set()
	def start(self):
		""" Convert all legacy posts into the new SQL manifest format,
		saving any failed conversions to a JSON report for review. """
		os.makedirs(self.new_save_base, exist_ok=True)
		if not settings.load(self.settings_file):
			raise Exception("You must provide a valid settings.json file!")
		settings.put('output.base_dir', self.new_save_base)
		sql.init_from_settings()
		self.session = sql.session()
		print("Scanning legacy posts...")
		self.scan()
		print("Found %s elements total." % len(self.posts.keys()))
		self.process_posts()
		self.session.commit()
		print("Processed:", len(self.posts), "Posts.")
		print("Failed to convert %s Posts." % len(self.failures))
		# Persist the failure list so the user can review it before deleting anything.
		outfile = os.path.join(self.new_save_base, 'failed_conversion_posts.json')
		report = json.dumps(self.failures, indent=4, sort_keys=True, separators=(',', ': '))
		with open(outfile, 'w') as fh:
			fh.write(report)
			print("Saved a list of failed posts to", outfile, ', be sure to check these before deleting anything!')
Exemplo n.º 20
0
    def run(self):
        """ Threaded loading of elements.

        Dedupe loop with error reporting: any exception is surfaced through
        the progress object, and the SQL connection is always closed.
        """
        settings.from_json(self._settings)
        sql.init_from_settings()
        try:
            self._session = sql.session()
            self.progress.clear(status="Starting up...")
            self.progress.set_running(True)

            while not self._stop_event.is_set():
                self._dedupe()
                self.progress.set_status("Ready for new files...")
                self._stop_event.wait(2)
            # Final pass after downloading stops.
            self._dedupe()
            self.progress.clear(status="Finished.", running=False)
        except Exception as err:
            print('Deduplication Process Error:', err)
            self.progress.set_error(err)
            self.progress.set_running(False)
            traceback.print_exc()
        finally:
            sql.close()
Exemplo n.º 21
0
    def run(self):
        """ Threaded loading of elements.

        Deduplication worker loop: runs _dedupe() passes until the stop event
        is set, pacing itself faster (1s) when work was done and slower (10s)
        when idle, then performs one final pass. Errors are reported via the
        progress object; the SQL connection is always closed.
        """
        settings.from_json(self._settings)
        sql.init_from_settings()
        # NOTE(review): built-in print() takes no `debug` kwarg — presumably a
        # project-local print override is in scope here; confirm.
        print("Starting up...", debug=True)
        try:
            self._session = sql.session()
            self.progress.clear(status="Starting up...")
            self.progress.set_running(True)
            self.dedup_ignore_ids = set()
            self.prune_counter = 0
            # Negative-id Hash rows are treated specially — loaded once up front.
            self.special_hashes = self._session.query(Hash).filter(
                Hash.id < 0).all()

            while not self._stop_event.is_set():
                #print("_stop_event is %s"%self._stop_event.is_set(), debug=True)
                completed = self._dedupe()
                if completed:
                    # Work was done: report and poll again soon.
                    self.progress.set_status(
                        "Completed %s files. Ready for new files..." %
                        completed)
                    self._stop_event.wait(1)
                else:
                    # Idle: back off to a slower poll interval.
                    self._stop_event.wait(10)
            print("_stop_event is %s" % self._stop_event.is_set(), debug=True)
            self._dedupe()  # Run one final pass after downloading stops.
            self.progress.clear(status="Finished.", running=False)
        except Exception as ex:
            print('Deduplication Process Error:', ex)
            self.progress.set_error(ex)
            self.progress.set_running(False)
            traceback.print_exc()
        finally:
            print("Finished process, _stop_event is %s" %
                  self._stop_event.is_set(),
                  debug=True)
            sql.close()
Exemplo n.º 22
0
	def __init__(self, bot: Bot):
		""" Wire up this Cog: bot reference, its own DB session, a message
		handler bound to that session, and a per-Cog settings namespace. """
		cog_name = self.__class__.__name__
		self.bot = bot
		self.sql: Session = sql.session()
		self.handler = BotMessageHandler(self.sql)
		self.settings = settings.SettingWrapper(cog_name)
		print("Registered Cog:", cog_name)
Exemplo n.º 23
0
"Custom source importer"
import os
import static.filesystem as fs
import static.settings as settings
import sources
from datetime import datetime
import sql
from sql import Post, File, URL, Hash
from sqlalchemy.sql.expression import func

#datetime.now().strftime("%c %X")
settings_file = fs.find_file('settings.json')
_loaded = settings.load(settings_file)
_session = sql.session()

url_patt = r'^(?:(?!(youtu\.be|youtube\.com|amazon\.c|twitter\.c|instagram\.com)).)*$'


def strf_utc(sec):
    return datetime.fromtimestamp(sec).strftime("%y-%m-%d %H:%M:%S")


def user_source(name,
                alias=None,
                ps=False,
                limit=None,
                deep=False,
                check_last=None,
                check_utc=0):
    u = name.replace('/u/', '').replace('u/', '').strip('/')
    out = {
Exemplo n.º 24
0
    def run(self):
        """ Threaded loading of elements.

        Downloader worker loop: pulls URL ids from the reader pipe, handles
        each one (download via handlers), records the result under the shared
        DB lock, and ACKs it back — including any album URLs discovered —
        until the pipe ends or an error breaks the loop.
        """
        settings.from_json(self._settings)
        sql.init_from_settings()
        self._session = sql.session()
        self.progress.clear(status="Starting up...", running=True)
        failed = False

        for nxt_id in self._reader:
            try:
                url = self._session.query(
                    sql.URL).filter(sql.URL.id == nxt_id).first()
                if not url:
                    raise Exception("Unknown URL ID provided: (%s}" % nxt_id)

                file = url.file
                path = SanitizedRelFile(base=settings.get("output.base_dir"),
                                        file_path=str(file.path))

                self.progress.set_file(path.relative())
                self.progress.set_status("Attempting to Handle URL...")
                self.progress.set_running(True)

                task = handlers.HandlerTask(url=url.address, file_obj=path)
                resp = handlers.handle(task, self.progress)

                is_album_parent = False

                # All DB mutations for this URL happen atomically under the lock.
                with self._db_lock:
                    if resp.album_urls:
                        if url.album_id:
                            resp.album_urls = [
                            ]  # Ignore nested Albums to avoid recursion.
                        else:
                            # First sighting of this album: mint an id and
                            # mark this URL as the parent.
                            url.album_id = str(uuid.uuid4())
                            is_album_parent = True
                    else:
                        resp.album_urls = []

                    url.failed = not resp.success
                    url.failure_reason = resp.failure_reason
                    url.last_handler = resp.handler
                    url.album_is_parent = is_album_parent

                    if resp.rel_file:
                        file.downloaded = True
                        file.path = resp.rel_file.relative()
                        file.hash = None  # Invalidate old hash; dedupe will recompute.
                        # Touch mtime so the file reflects download time.
                        utime(resp.rel_file.absolute(), times=(time(), time()))

                    self._session.commit()

                # Once *all* processing is completed on this URL, the Downloader needs to ACK it.
                # If any additional Album URLS were located, they should be sent before the ACK.
                self._ack_queue.put(
                    AckPacket(url_id=nxt_id, extra_urls=resp.album_urls))
                self.progress.clear(status="Waiting for URL...")
            except Exception as ex:
                # Still ACK the failed id (with no extra URLs) so the loader
                # doesn't wait on it forever, then exit the loop.
                failed = str(ex)
                self._ack_queue.put(AckPacket(url_id=nxt_id, extra_urls=[]))
                print(ex)
                traceback.print_exc()
                self.progress.set_error("Exited with error: {%s}" % failed)
                break

        sql.close()
        self.progress.clear(
            "Finished." if not failed else "Exited with error: %s" % failed,
            running=False)
Exemplo n.º 25
0
 def setUp(self):
     """ Reload the sql module and build a fresh PostSearcher per test. """
     importlib.reload(sql)
     self.assertFalse(sql._engine, msg="Failed to clear SQL session.")
     settings.load(self.settings_file)
     sql.init_from_settings()
     sess = sql.session()
     self.ps = sql.PostSearcher(sess)