def run(self):
    """Thread main function.

    Polls roughly every two seconds: signals a movie-database rebuild
    when the external marker file exists, resets the drive status of
    all removable media and re-checks them.  Exits when a ``stop``
    attribute is set on the thread object.
    """
    rebuild_file = os.path.join(config.FREEVO_CACHEDIR, 'freevo-rebuild-database')
    # Make sure the movie database is rebuilt at startup
    util.touch(rebuild_file)
    while True:
        try:
            # Check if we need to update the database
            # This is a simple way for external apps to signal changes
            if os.path.exists(rebuild_file):
                if video.hash_fxd_movie_database() == 0:
                    # something is wrong, deactivate this feature by
                    # pointing at a path that can never exist
                    rebuild_file = '/this/file/should/not/exist'
            for media in config.REMOVABLE_MEDIA:
                media.drive_status = CDS_NO_INFO
                #media.get_drive_status()
            self.check_all()
            for i in range(4):
                # wait some time
                time.sleep(0.5)
                # check if we need to stop
                if hasattr(self, 'stop'):
                    return
        except SystemExit:
            break
        except:
            # NOTE(review): bare except also catches KeyboardInterrupt;
            # kept as-is since this is a long-running daemon thread.
            logger.error('Exception whilst checking removable media!' + traceback.format_exc())
def run(self):
    """Thread main function.

    Polls roughly every two seconds: signals a movie-database rebuild
    when the external marker file exists, clears the drive status of
    all removable media and re-checks them while no application is in
    the foreground.  Exits when a ``stop`` attribute is set.
    """
    rebuild_file = os.path.join(config.FREEVO_CACHEDIR, 'freevo-rebuild-database')
    # Make sure the movie database is rebuilt at startup
    util.touch(rebuild_file)
    while 1:
        # Check if we need to update the database
        # This is a simple way for external apps to signal changes
        if os.path.exists(rebuild_file):
            if video.hash_fxd_movie_database() == 0:
                # something is wrong, deactivate this feature by
                # pointing at a path that can never exist
                rebuild_file = '/this/file/should/not/exist'
        for media in config.REMOVABLE_MEDIA:
            media.drive_status = None
        if not rc.app():
            # check only in the menu
            self.check_all()
        for i in range(4):
            # wait some time
            time.sleep(0.5)
            # check if we need to stop
            if hasattr(self, 'stop'):
                return
def startup(client, params): c = None # detect if the collector already exists in the portal f = collector.find_collector(client, params) if not f: logging.debug('Collector not found') c = collector.collector(client, params) c = collector.create_collector(client, c) else: logging.debug('Collector found') c = f # we want to make a note on the fs that we found an existing collector # so that we don't remove it during a future cleanup, but we should # only make this note if this is the first time the container is run # (otherwise, every subsequent should detect the existing collector # that we're going to create below. Not the behavior we want) if not os.path.isfile(config.FIRST_RUN): util.touch(config.COLLECTOR_FOUND) # let subsequent runs know that this isn't the first container run util.touch(config.FIRST_RUN) # detect if collector is already installed if os.path.isdir(config.INSTALL_PATH + config.AGENT_DIRECTORY): logging.debug('Collector already installed.') util.cleanup() return collector.install_collector(client, c, params)
def jekyll_build(checkout_path):
    ''' Run "jekyll build" in a checkout directory.

        Returns the path to the generated "_site" directory.  The build
        is skipped when the site is fresh, or when the commit hash
        recorded at the last build matches the checkout's current hash.
    '''
    checkout_lock = checkout_path + '.jekyll-lock'
    jekyll_path = join(checkout_path, '_site')
    built_hash_file = checkout_path + '.built-hash'
    hash_file = checkout_path + '.commit-hash'

    # Fast path: site already built and still fresh, no lock needed.
    if exists(jekyll_path) and is_fresh(jekyll_path):
        return jekyll_path

    with locked_file(checkout_lock):
        do_build = True

        # Guard both files: the original assumed hash_file existed
        # whenever built_hash_file did, which could raise IOError.
        if exists(built_hash_file) and exists(hash_file):
            # Close the files deterministically instead of leaking
            # the handles from bare open(...).read() expressions.
            with open(built_hash_file) as f:
                built_hash = f.read().strip()
            with open(hash_file) as f:
                commit_hash = f.read().strip()

            if built_hash == commit_hash:
                jlogger.debug('Skipping build to ' + jekyll_path)
                do_build = False

        if do_build:
            jlogger.info('Building jekyll ' + jekyll_path)
            run_cmd(('jekyll', 'build'), checkout_path)

            # Remember which commit this build corresponds to.
            if exists(hash_file):
                copyfile(hash_file, built_hash_file)

        # Mark the site directory fresh either way.
        touch(jekyll_path)

    return jekyll_path
def git_checkout(repo_path, checkout_path, ref):
    ''' Check out a git repository to a given reference and path.

        This function is assumed to be run in a lock.  The checkout is
        skipped when the recorded commit hash already matches *ref*.
    '''
    jlogger.info('Checking out to ' + checkout_path)

    if not exists(checkout_path):
        mkdir(checkout_path)

    hash_file = checkout_path + '.commit-hash'
    commit_hash = get_ref_sha(repo_path, ref)

    do_checkout = True

    if exists(hash_file):
        previous_hash = open(hash_file).read().strip()
        if previous_hash == commit_hash:
            jlogger.debug('Skipping checkout to '+checkout_path)
            do_checkout = False

    if do_checkout:
        # populate the separate work tree without moving the repo's HEAD
        run_cmd(('git', '--work-tree='+checkout_path, 'checkout', ref, '--', '.'), repo_path)

    touch(checkout_path)

    # record the checked-out commit for the skip test above
    # (Python 2 print-to-file syntax)
    with open(hash_file, 'w') as file:
        print >> file, commit_hash
def git_checkout(repo_path, checkout_path, ref):
    ''' Check out a git repository to a given reference and path.

        This function is assumed to be run in a lock.  The checkout is
        skipped when the recorded commit hash already matches *ref*.
    '''
    jlogger.info('Checking out to ' + checkout_path)

    if not exists(checkout_path):
        mkdir(checkout_path)

    hash_file = checkout_path + '.commit-hash'
    commit_hash = get_ref_sha(repo_path, ref)

    do_checkout = True

    if exists(hash_file):
        previous_hash = open(hash_file).read().strip()
        if previous_hash == commit_hash:
            jlogger.debug('Skipping checkout to ' + checkout_path)
            do_checkout = False

    if do_checkout:
        # populate the separate work tree without moving the repo's HEAD
        run_cmd(('git', '--work-tree=' + checkout_path, 'checkout', ref, '--', '.'), repo_path)

    touch(checkout_path)

    # record the checked-out commit for the skip test above
    # (Python 2 print-to-file syntax)
    with open(hash_file, 'w') as file:
        print >> file, commit_hash
def save(word, path):
    """Serialize *word* as minified JSON into ``<path>/<word id>.json``.

    A ``word`` of ``None`` is silently ignored.
    """
    if word is None:
        return
    cache_path = os.path.join(path, word['id'] + '.json')
    touch(cache_path)
    with open(cache_path, 'w') as fp:
        # compact separators -> minified output
        json.dump(word, fp, separators=(',', ':'))
def hard_link_in_cwd(filename):
    """Archive a hard-linked file pair with xar and verify the link survives.

    Creates "a" and a hard link "b" in a scratch directory, archives the
    directory, extracts the archive, and asserts both extracted entries
    share the same inode.
    """
    with util.directory_created("hardlink_scratch") as directory:
        with util.chdir(directory):
            util.touch("a")
            os.link("a", "b")
            with util.archive_created(os.path.join("..", "hardlink_archive.xar"), ".") as path:
                with util.directory_created(os.path.join("..", "hardlink_extracted")) as extracted:
                    subprocess.check_call(["xar", "-x", "-C", extracted, "-f", path])
                    _assert_same_inodes(os.path.join(extracted, "a"), os.path.join(extracted, "b"))
def crl_stop(argv):
    """stop - shut down the crawler daemon if it is running

    usage: crawl stop
    """
    p = optparse.OptionParser()
    p.add_option('-d', '--debug',
                 action='store_true', default=False, dest='debug',
                 help='run the debugger')
    p.add_option('-l', '--log',
                 action='store', default='crawler.log', dest='logfile',
                 help='specify the log file')
    p.add_option('-C', '--context',
                 action='store', default='', dest='context',
                 help="context of crawler (PROD/DEV/TEST)")
    (o, a) = p.parse_args(argv)

    if o.debug:
        pdb.set_trace()

    rpid_l = running_pid()
    if rpid_l == []:
        print("No crawlers are running -- nothing to stop.")
        return

    # rpid entries look like (pid, context, path); presumably the path
    # is an exit-flag file the daemon watches -- TODO confirm
    ctx_l = [rpid[1] for rpid in rpid_l]
    if o.context != '' and o.context not in ctx_l:
        print("No %s crawler is running -- nothing to stop." % o.context)
        return

    if o.context == '':
        if 1 == len(rpid_l):
            # exactly one crawler running -- confirm interactively
            # (raw_input => Python 2)
            answer = raw_input("Preparing to stop %s crawler. Proceed? > " % ctx_l[0])
            if answer.strip().lower().startswith('y'):
                print("Stopping the crawler...")
                util.touch(rpid_l[0][2])
            else:
                print("No action taken")
        else:
            # more than one entry in rpid_l
            print("More than one crawler is running.")
            print("Please specify a context (e.g., 'crawl stop -C PROD')")
    else:
        idx = ctx_l.index(o.context)
        print("Stopping the %s crawler..." % ctx_l[idx])
        util.touch(rpid_l[idx][2])
def api_touch(path):
    """Touch *path*, retrying up to RETRY_MAX additional times.

    Sleeps RETRY_INTERVAL seconds between attempts; raises Exception
    once every attempt has failed.
    """
    cnt = RETRY_MAX
    while cnt >= 0:
        try:
            touch(path)
            return
        except Exception:
            # Narrowed from a bare "except:": the bare clause also
            # swallowed KeyboardInterrupt/SystemExit, making the retry
            # loop impossible to interrupt.
            pass
        cnt -= 1
        if cnt >= 0:
            time.sleep(RETRY_INTERVAL)
    raise Exception('failed to touch %s' % str(path))
def hard_link_in_cwd(filename):
    """Archive a hard-linked file pair with xar and verify the link survives.

    Creates "a" and a hard link "b" in a scratch directory, archives the
    directory, extracts the archive, and asserts both extracted entries
    share the same inode.
    """
    with util.directory_created("hardlink_scratch") as directory:
        with util.chdir(directory):
            util.touch("a")
            os.link("a", "b")
            with util.archive_created(
                    os.path.join("..", "hardlink_archive.xar"), ".") as path:
                with util.directory_created(
                        os.path.join("..", "hardlink_extracted")) as extracted:
                    subprocess.check_call(
                        ["xar", "-x", "-C", extracted, "-f", path])
                    _assert_same_inodes(os.path.join(extracted, "a"),
                                        os.path.join(extracted, "b"))
def _configure(self, reconfigure):
    """Run the package's configure step inside its build directory.

    A ``.configured`` flag file in the build directory marks a completed
    configure.  When it exists the step is skipped, unless *reconfigure*
    is true, in which case the flag is removed and configure runs again.
    """
    configured_flag = self.j('{build_dir}', '.configured')
    if self.exists(configured_flag):
        if reconfigure:
            logger.info("{pkg_name} already configured. Reconfiguring.".format(**self.config))
            os.unlink(configured_flag)
        else:
            logger.info("{pkg_name} already configured. Continuing".format(**self.config))
            return
    self.ensure_dir('{build_dir}')
    with chdir(self.config['build_dir']):
        self.configure()
    # flag is only written after configure() returns without raising
    touch(configured_flag)
def Mailer(mailQueue, context):
    """Collect items from the queue supplied by watchSumissions and
    watchComments, assemble them into digest form and send out via
    email.

    This function is designed to run as its own process, and does not
    normally return.
    """
    setproctitle("hermod (outgoing mailer for %s)" % context[1])
    body = ""
    itemCount = 0
    lastSent = time.time()
    lastOut = time.time()
    while True:
        try:
            # we'll send at most once per hour
            timeout = max(0, conf.mail['interval'] - time.time() + lastSent)
            if itemCount >= conf.mail['maxcount']:
                # digest full: make the next get() non-blocking so the
                # queue.Empty path flushes the mail immediately
                timeout = 0
            if (time.time() - lastOut) > 60:
                # don't spam with messages after each update
                print("[outmailer] %d items queued, waiting at most %d more seconds for messages..." % (itemCount, timeout))
                lastOut = time.time()
            # block until an item arrives or the timeout expires
            body = body + mailQueue.get(True if timeout > 0 else False, timeout)
            itemCount = itemCount + 1
        except queue.Empty:
            if itemCount > 0:
                print("[outmailer] sending mail to %s..." % context[1])
                with SMTP_SSL(conf.mail['smtphost']) as smtp:
                    smtp.login(conf.mail['username'], conf.mail['password'])
                    msg = MIMEText(body)
                    msg['Subject'] = "Activity on Reddit - %d items" % itemCount
                    msg['From'] = conf.mail['sender']
                    msg['To'] = context[1]
                    smtp.sendmail(conf.mail['sender'], context[1], msg.as_string())
                # reset the digest and remember when we last sent
                body = ""
                itemCount = 0
                lastSent = time.time()
                # timestamp file; presumably a liveness marker -- TODO confirm
                util.touch(".hermod_ts")
def fxd_write(self, fxdfile):
    """
    Create or update an fxd file for a movie or disc set.

    Registers write handlers for the copyright block and for either the
    disc-set or the movie block (depending on ``self.isdiscset``), then
    saves the file.  Errors are logged and swallowed so a bad fxd file
    does not abort the caller; the cache-rebuild marker is touched in
    every case.
    """
    try:
        fxd = util.fxdparser.FXD(fxdfile + '.fxd')
        fxd.set_handler('copyright', self.fxd_set_copyright, 'w', True)
        if self.isdiscset:
            fxd.set_handler('disc-set', self.fxd_set_discset, 'w', True)
        else:
            fxd.set_handler('movie', self.fxd_set_movie, 'w', True)
        fxd.save()
    except Exception as error:
        logger.error('Error creating/updating fxd file ' + fxdfile + '.fxd, skipping. Error=\'%s\'', error)
    # Signal the cache rebuild exactly once.  The original touched the
    # marker twice on the success path (once inside the try and once
    # here), which was redundant.
    util.touch(os.path.join(config.FREEVO_CACHEDIR, 'freevo-rebuild-database'))
def startup(client, params): c = None # if the kubernetes param is specified, assume this is part of a # collector set and parse the id accordingly, bypassing other id lookups if params['kubernetes']: logging.debug('Kubernetes mode enabled. Parsing id from environment') collector_id = kubernetes.get_collector_id() logging.debug('Parsed id ' + str(collector_id)) params['collector_id'] = collector_id # detect if the collector already exists in the portal f = collector.find_collector(client, params) if not f: logging.debug('Collector not found') if params['kubernetes']: err = 'Running in kubernetes mode but existing collector not found' util.fail(err) c = collector.collector(client, params) c = collector.create_collector(client, c) else: logging.debug('Collector found') c = f # we want to make a note on the fs that we found an existing collector # so that we don't remove it during a future cleanup, but we should # only make this note if this is the first time the container is run # (otherwise, every subsequent should detect the existing collector # that we're going to create below. Not the behavior we want) if not os.path.isfile(config.FIRST_RUN): util.touch(config.COLLECTOR_FOUND) # let subsequent runs know that this isn't the first container run util.touch(config.FIRST_RUN) # detect if collector is already installed if os.path.isdir(config.INSTALL_PATH + config.AGENT_DIRECTORY): logging.debug('Collector already installed.') util.cleanup() return collector.install_collector(client, c, params)
def git_fetch(repo_path, ref, sha):
    '''Fetch remote refs into a local repository when *ref* is stale.

    A ref that cannot be resolved locally triggers an unconditional
    full fetch first; afterwards the fetch is skipped if the ref
    already resolves to *sha*.
    '''
    jlogger.info('Fetching in ' + repo_path)

    try:
        found_sha = get_ref_sha(repo_path, ref)
    except RuntimeError:
        # Ref unknown locally -- do a complete fetch, then resolve again.
        jlogger.debug('Complete fetch in ' + repo_path)
        run_cmd(('git', 'fetch'), repo_path)
        found_sha = get_ref_sha(repo_path, ref)

    if sha != found_sha:
        run_cmd(('git', 'fetch'), repo_path)
    else:
        jlogger.debug('Skipping fetch in ' + repo_path)

    touch(repo_path)
def git_fetch(repo_path, ref, sha):
    ''' Run `git fetch` inside a local git repository.

        The fetch is skipped when *ref* already resolves to *sha*.
    '''
    jlogger.info('Fetching in ' + repo_path)

    try:
        found_sha = get_ref_sha(repo_path, ref)
    except RuntimeError:
        #
        # Account for a missing ref by performing a complete fetch.
        #
        jlogger.debug('Complete fetch in '+repo_path)
        run_cmd(('git', 'fetch'), repo_path)
        found_sha = get_ref_sha(repo_path, ref)

    if sha == found_sha:
        # already at the wanted commit -- nothing to fetch
        jlogger.debug('Skipping fetch in '+repo_path)
    else:
        run_cmd(('git', 'fetch'), repo_path)

    touch(repo_path)
def _mixed_length(filename, *args, **kwargs):
    """Populate a test archive with one empty and one four-byte file."""
    with _data_test(filename, *args, **kwargs) as directory:
        empty_path = os.path.join(directory, "mixed_empty")
        small_path = os.path.join(directory, "mixed_small")
        util.touch(empty_path)
        with open(small_path, "w") as small:
            small.write("1234")
# Script body (Python 2: print statements).  With no argument, scan every
# file under xml/raw against the Echo Nest API; with one argument, check
# just that file.  Results are cached on disk: a copy in xml/existing for
# hits, an empty marker in xml/nonexistant for misses.
util.mkdir_p("xml/nonexistant")

api_key = os.environ["ECHO_NEST_API_KEY"]

if len(sys.argv) != 2:
    # No argument given, do entire xml/raw directory
    for dirpath, dirnames, filenames in os.walk("xml/raw"):
        for filename in filenames:
            full_path = os.path.join(dirpath, filename)
            print full_path,
            exist_path = os.path.join("xml/existing", filename)
            if os.path.isfile(exist_path):
                # cached positive result from a previous run
                print " o"
                continue
            nonexist_path = os.path.join("xml/nonexistant", filename)
            if os.path.isfile(nonexist_path):
                # cached negative result from a previous run
                print " x"
                continue
            if found_song(full_path, api_key):
                print " O"
                shutil.copyfile(full_path, exist_path)
            else:
                # empty marker file caches the negative lookup
                util.touch(nonexist_path)
                print " X"
            # throttle the API requests
            time.sleep(0.5)
else:
    if found_song(sys.argv[1], api_key):
        print os.path.join("xml/existing", os.path.basename(sys.argv[1]))
i.write("%s</file-opt>\n" % self.str2XML(fname)) i.write(" </disc>\n") else: i.write(" </disc>\n") #image if self.image: i.write(" <cover-img source=\"%s\">" % self.str2XML(self.image_url)) i.write("%s</cover-img>\n" % self.str2XML(self.image)) #print info i.write(self.print_info()) #close tags i.write(" </disc-set>\n") i.write("</freevo>\n") util.touch(os.path.join(config.FREEVO_CACHEDIR, 'freevo-rebuild-database')) def write_movie(self): """Write <movie> to fxd file""" try: i = vfs.codecs_open( (self.fxdfile + '.fxd') , 'w', encoding='utf-8') except IOError, error: raise FxdImdb_IO_Error("Writing FXD file failed : " + str(error)) return #header i.write("<?xml version=\"1.0\" ?>\n<freevo>\n") i.write(" <copyright>\n" + " The information in this file are from the Internet " +
def process_command(command_list):
    """Dispatch a CLI command against one or more containers.

    The first element of *command_list* is the command name; remaining
    elements are container names, except for "enter" and "mode" which
    take further arguments instead.  When no containers are named, all
    run dependencies except 'global' are used.

    NOTE(review): dict.has_key() is Python-2-only while print() is used
    in call form -- presumably Python 2 with a print_function import;
    confirm before restyling.
    """
    Mode = get_mode()
    commands = deque(command_list)
    command = commands.popleft()
    if len(commands) > 0:
        # which = [commands.popleft()]
        # exclude here commands which cannot take multiple container names but instead take further args
        if command != "enter":
            which = commands
    else:
        deps = mem.sorted_run_deps
        deps.remove('global')
        which = deps
    if command == 'fill':
        for container in which:
            fill_container(container, mem.config[container])
    elif command == 'stack':
        for container in which:
            # skip containers explicitly configured with run: false
            if mem.config[container].has_key('run') and not mem.config[container]['run']:
                pass
            else:
                stack_container(container, mem.config[container])
    elif command == 'unstack':
        # reversed: tear down in the opposite order of stacking
        for container in reversed(which):
            if mem.config[container].has_key('run') and not mem.config[container]['run']:
                pass
            else:
                unstack_container(container)
    elif command == 'restack':
        for container in which:
            if mem.config[container].has_key('run') and not mem.config[container]['run']:
                pass
            else:
                unstack_container(container)
                stack_container(container, mem.config[container])
    elif command == "status":
        for container in which:
            if mem.config[container].has_key('run') and not mem.config[container]['run']:
                pass
            else:
                container_status(container)
        Mode = get_mode()
        print ( "Mode is " + Mode['noisy'] + ", " + Mode['destructive'], end="" )
    elif command == "empty":
        for container in reversed(which):
            unstack_container(container)
            empty_container(container, mem.config[container])
    elif command == "refill":
        for container in which:
            unstack_container(container)
            # only wipe data when the destructive mode flag is set
            if Mode['destructive'] == 'destructive':
                empty_container(container, mem.config[container])
            fill_container(container, mem.config[container])
    elif command == "enter":
        # if len(which) > 1:
        #     raise(Exception("You may only enter one container at a time. Please provide container name"))
        enter_container(commands)
    elif command == "refstk":
        if len(which) > 1:
            raise (Exception("You may only refstk one container at a time. Please provide container name"))
        container = which[0]
        unstack_container(container)
        if Mode['destructive'] == 'destructive':
            empty_container(container, mem.config[container])
        fill_container(container, mem.config[container])
        stack_container(container, mem.config[container])
    elif command == "edit":
        edit_yaml()
    elif command == "editd":
        for container in which:
            if mem.config[container].has_key('build'):
                edit_dockerfile(container)
    elif command == "mode":
        MODE_USAGE = "Mode can either be 'noisy', 'quiet', 'destructive', 'conservative'"
        if len(command_list) == 1:
            print_mode()
            return
        mode = command_list[1]
        config_path = mem.ConfigPath + "/" + mem.Project
        quietFile = config_path + "/quiet"
        noisyFile = config_path + "/noisy"
        destructFile = config_path + "/destructive"
        conserveFile = config_path + "/conservative"
        # modes are stored as mutually-exclusive marker files in the
        # project config directory: noisy/quiet and destructive/conservative
        if mode == "noisy":
            if os.path.isfile(quietFile):
                os.remove(quietFile)
            touch(noisyFile)
        elif mode == "quiet":
            if os.path.isfile(noisyFile):
                os.remove(noisyFile)
            touch(quietFile)
        elif mode == "destructive":
            if os.path.isfile(conserveFile):
                os.remove(conserveFile)
            touch(destructFile)
        elif mode == "conservative":
            if os.path.isfile(destructFile):
                os.remove(destructFile)
            touch(conserveFile)
        else:
            print ( MODE_USAGE, end="" )
            return
    elif command == "test":
        run_tests()
    else:
        raise Exception("Unknown command '" + command + "'")
# Test script for GittyupClient commits.  Run normally it creates a repo,
# makes two commits, and prints "commit.py pass"; run with --cleanup it
# removes the test directory.
parser = OptionParser()
parser.add_option("-c", "--cleanup", action="store_true", default=False)
(options, args) = parser.parse_args(argv)

DIR = "commit"

if options.cleanup:
    rmtree(DIR, ignore_errors=True)
    print("commit.py clean")
else:
    if os.path.isdir(DIR):
        raise SystemExit("This test script has already been run. Please call this script with --cleanup to start again")

    os.mkdir(DIR)
    g = GittyupClient()
    g.initialize_repository(DIR)

    # seed the repository with two files and a first commit
    touch(DIR + "/test1.txt")
    touch(DIR + "/test2.txt")
    g.stage([DIR+"/test1.txt", DIR+"/test2.txt"])
    g.commit("First commit", commit_all=True)

    # modify one file and commit again with an explicit author
    change(DIR + "/test1.txt")
    g.stage([DIR+"/test1.txt"])
    g.commit("Second commit", author="Alex Plumb <*****@*****.**>")

    print("commit.py pass")
# Test script for GittyupClient commits.  Run normally it creates a repo,
# makes two commits, and prints "commit.py pass"; run with --cleanup it
# removes the test directory.
parser.add_option("-c", "--cleanup", action="store_true", default=False)
(options, args) = parser.parse_args(argv)

DIR = "commit"

if options.cleanup:
    rmtree(DIR, ignore_errors=True)
    print("commit.py clean")
else:
    if os.path.isdir(DIR):
        raise SystemExit(
            "This test script has already been run. Please call this script with --cleanup to start again"
        )

    os.mkdir(DIR)
    g = GittyupClient()
    g.initialize_repository(DIR)

    # seed the repository with two files and a first commit
    touch(DIR + "/test1.txt")
    touch(DIR + "/test2.txt")
    g.stage([DIR + "/test1.txt", DIR + "/test2.txt"])
    g.commit("First commit", commit_all=True)

    # modify one file and commit again with an explicit author
    change(DIR + "/test1.txt")
    g.stage([DIR + "/test1.txt"])
    g.commit("Second commit", author="Alex Plumb <*****@*****.**>")

    print("commit.py pass")
def process_command(command_list):
    """Dispatch a CLI command against one or more containers.

    The first element of *command_list* is the command name; remaining
    elements are container names, except for "enter" and "mode" which
    take further arguments instead.  When no containers are named, all
    run dependencies except 'global' are used.

    (Python 2: dict.has_key() and print statements.)
    """
    Mode = get_mode()
    commands = deque(command_list)
    command = commands.popleft()
    if len(commands) > 0:
        # which = [commands.popleft()]
        # exclude here commands which cannot take multiple container names but instead take further args
        if command != "enter":
            which = commands
    else:
        deps = mem.sorted_run_deps
        deps.remove('global')
        which = deps
    if command == 'fill':
        for container in which:
            fill_container(container, mem.config[container])
    elif command == 'stack':
        for container in which:
            # skip containers explicitly configured with run: false
            if mem.config[container].has_key(
                    'run') and not mem.config[container]['run']:
                pass
            else:
                stack_container(container, mem.config[container])
    elif command == 'unstack':
        # reversed: tear down in the opposite order of stacking
        for container in reversed(which):
            if mem.config[container].has_key(
                    'run') and not mem.config[container]['run']:
                pass
            else:
                unstack_container(container)
    elif command == 'restack':
        for container in which:
            if mem.config[container].has_key(
                    'run') and not mem.config[container]['run']:
                pass
            else:
                unstack_container(container)
                stack_container(container, mem.config[container])
    elif command == "status":
        for container in which:
            if mem.config[container].has_key(
                    'run') and not mem.config[container]['run']:
                pass
            else:
                container_status(container)
        Mode = get_mode()
        print "Mode is " + Mode['noisy'] + ", " + Mode['destructive']
    elif command == "empty":
        for container in reversed(which):
            unstack_container(container)
            empty_container(container, mem.config[container])
    elif command == "refill":
        for container in which:
            unstack_container(container)
            # only wipe data when the destructive mode flag is set
            if Mode['destructive'] == 'destructive':
                empty_container(container, mem.config[container])
            fill_container(container, mem.config[container])
    elif command == "enter":
        # if len(which) > 1:
        #     raise(Exception("You may only enter one container at a time. Please provide container name"))
        enter_container(commands)
    elif command == "refstk":
        if len(which) > 1:
            raise (Exception(
                "You may only refstk one container at a time. Please provide container name"
            ))
        container = which[0]
        unstack_container(container)
        if Mode['destructive'] == 'destructive':
            empty_container(container, mem.config[container])
        fill_container(container, mem.config[container])
        stack_container(container, mem.config[container])
    elif command == "edit":
        edit_yaml()
    elif command == "editd":
        for container in which:
            if mem.config[container].has_key('build'):
                edit_dockerfile(container)
    elif command == "mode":
        MODE_USAGE = "Mode can either be 'noisy', 'quiet', 'destructive', 'conservative'"
        if len(command_list) == 1:
            print_mode()
            return
        mode = command_list[1]
        config_path = mem.ConfigPath + "/" + mem.Project
        quietFile = config_path + "/quiet"
        noisyFile = config_path + "/noisy"
        destructFile = config_path + "/destructive"
        conserveFile = config_path + "/conservative"
        # modes are stored as mutually-exclusive marker files in the
        # project config directory: noisy/quiet and destructive/conservative
        if mode == "noisy":
            if os.path.isfile(quietFile):
                os.remove(quietFile)
            touch(noisyFile)
        elif mode == "quiet":
            if os.path.isfile(noisyFile):
                os.remove(noisyFile)
            touch(quietFile)
        elif mode == "destructive":
            if os.path.isfile(conserveFile):
                os.remove(conserveFile)
            touch(destructFile)
        elif mode == "conservative":
            if os.path.isfile(destructFile):
                os.remove(destructFile)
            touch(conserveFile)
        else:
            print MODE_USAGE
            return
    elif command == "test":
        run_tests()
    else:
        raise Exception("Unknown command '" + command + "'")
def _zero_length(filename, *args, **kwargs):
    """Populate a test archive with a single zero-byte file."""
    with _data_test(filename, *args, **kwargs) as directory:
        empty_path = os.path.join(directory, "empty")
        util.touch(empty_path)
# Test script for GittyupClient remove/unstage.  Run normally it commits a
# file, removes it, and asserts the staged/unstaged states; run with
# --cleanup it deletes the test directory.
DIR = "remove"

if options.cleanup:
    rmtree(DIR, ignore_errors=True)
    print("remove.py clean")
else:
    if os.path.isdir(DIR):
        raise SystemExit("This test script has already been run. Please call this script with --cleanup to start again")

    os.mkdir(DIR)
    g = GittyupClient()
    g.initialize_repository(DIR)

    touch(DIR + "/test.txt")

    # Stage and commit the file
    g.stage([DIR+"/test.txt"])
    g.commit("Adding test.txt")

    g.remove([DIR+"/test.txt"])
    st = g.status()

    # after remove: file gone from disk, deletion staged
    assert (not os.path.exists(DIR+"/test.txt"))
    assert (g.is_staged(DIR+"/test.txt"))
    assert (st[0] == RemovedStatus)

    g.unstage([DIR+"/test.txt"])
    st = g.status()

    # after unstage: still gone from disk, no longer staged
    assert (not os.path.exists(DIR+"/test.txt"))
    assert (not g.is_staged(DIR+"/test.txt"))
# Test script for GittyupClient remove/unstage.  Run normally it commits a
# file, removes it, and asserts the staged/unstaged states; run with
# --cleanup it deletes the test directory.
if options.cleanup:
    rmtree(DIR, ignore_errors=True)
    print("remove.py clean")
else:
    if os.path.isdir(DIR):
        raise SystemExit(
            "This test script has already been run. Please call this script with --cleanup to start again"
        )

    os.mkdir(DIR)
    g = GittyupClient()
    g.initialize_repository(DIR)

    touch(DIR + "/test.txt")

    # Stage and commit the file
    g.stage([DIR + "/test.txt"])
    g.commit("Adding test.txt")

    g.remove([DIR + "/test.txt"])
    st = g.status()

    # after remove: file gone from disk, deletion staged
    assert (not os.path.exists(DIR + "/test.txt"))
    assert (g.is_staged(DIR + "/test.txt"))
    assert (st[0] == RemovedStatus)

    g.unstage([DIR + "/test.txt"])
    st = g.status()

    # after unstage: still gone from disk, no longer staged
    assert (not os.path.exists(DIR + "/test.txt"))
    assert (not g.is_staged(DIR + "/test.txt"))
#!/usr/bin/env python """ gn can only run python scripts. Generates flatbuffer TypeScript code. """ import subprocess import sys import os import shutil import util # TODO(ry) Ideally flatc output files should be written into target_gen_dir, but # its difficult to get this working in a way that parcel can resolve their # location. (Parcel does not support NODE_PATH.) Therefore this hack: write the # generated msg_generated.ts outputs into the js/ folder, and we check them into # the repo. Hopefully this hack can be removed at some point. If msg.fps is # changed, commit changes to the generated JS file. src = sys.argv[1] dst = sys.argv[2] stamp_file = sys.argv[3] shutil.copyfile(src, dst) util.touch(stamp_file)
# Script body (Python 2: print statements).  With no argument, scan every
# file under xml/raw against the Echo Nest API; with one argument, check
# just that file.  Results are cached on disk: a copy in xml/existing for
# hits, an empty marker in xml/nonexistant for misses.
util.mkdir_p('xml/nonexistant')

api_key = os.environ['ECHO_NEST_API_KEY']

if len(sys.argv) != 2:
    # No argument given, do entire xml/raw directory
    for dirpath, dirnames, filenames in os.walk('xml/raw'):
        for filename in filenames:
            full_path = os.path.join(dirpath, filename)
            print full_path,
            exist_path = os.path.join('xml/existing', filename)
            if os.path.isfile(exist_path):
                # cached positive result from a previous run
                print ' o'
                continue
            nonexist_path = os.path.join('xml/nonexistant', filename)
            if os.path.isfile(nonexist_path):
                # cached negative result from a previous run
                print ' x'
                continue
            if found_song(full_path, api_key):
                print ' O'
                shutil.copyfile(full_path, exist_path)
            else:
                # empty marker file caches the negative lookup
                util.touch(nonexist_path)
                print ' X'
            # throttle the API requests
            time.sleep(0.5)
else:
    if found_song(sys.argv[1], api_key):
        print os.path.join('xml/existing', os.path.basename(sys.argv[1]))
for opts in self.file_opts: mplopts, fname = opts i.write(" <file-opt mplayer-options=\"%s\">" % self.str2XML(mplopts)) i.write("%s</file-opt>\n" % self.str2XML(fname)) i.write(" </disc>\n") else: i.write(" </disc>\n") if self.image: i.write(" <cover-img source=\"%s\">" % self.str2XML(self.image_url)) i.write("%s</cover-img>\n" % self.str2XML(self.image)) i.write(self.print_info()) i.write(" </disc-set>\n") i.write("</freevo>\n") # now we need to rebuild the cache util.touch(os.path.join(config.FREEVO_CACHEDIR, 'freevo-rebuild-database')) def write_movie(self): """Write <movie> to fxd file""" try: i = vfs.codecs_open((self.fxdfile+'.fxd') , 'w', encoding='utf-8') except IOError, error: raise FxdImdb_IO_Error("Writing FXD file failed : " + str(error)) return #header i.write("<?xml version=\"1.0\" ?>\n<freevo>\n") i.write(" <copyright>\n") i.write(" The information in this file are from the Internet Movie Database (IMDb).\n")