#!/usr/bin/env python

import os
import urllib2
from cStringIO import StringIO
from lxml import etree as ET
from mbslave import Config, connect_db
from mbslave.search import fetch_all_updated

cfg = Config(os.path.join(os.path.dirname(__file__), 'mbslave.conf'))
db = connect_db(cfg, True)

xml = StringIO()
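# fetch_all_updated() yields one Solr document per record that changed since
# the last run; collect them all into a single <update> request body.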
xml.write('<update>\n')
for doc in fetch_all_updated(cfg, db):
    xml.write(ET.tostring(doc))
    xml.write('\n')
xml.write('</update>\n')

# POST the batch to Solr's update handler; commit=true makes the changes
# searchable as soon as the request succeeds.
req = urllib2.Request(cfg.solr.url + '/update?commit=true', xml.getvalue(),
                      {'Content-Type': 'application/xml; encoding=UTF-8'})
resp = urllib2.urlopen(req)
the_page = resp.read()

# Solr reports errors in the responseHeader; anything other than status 0
# means the update was rejected, so print the response and exit without
# committing the local transaction.
doc = ET.fromstring(the_page)
status = doc.find("lst[@name='responseHeader']/int[@name='status']")
if status is None or status.text != '0':
    print the_page
    raise SystemExit(1)

# Commit the local transaction only once Solr has accepted the batch.
db.commit()
#!/usr/bin/env python

import os
import itertools
from lxml import etree as ET
from lxml.builder import E
from mbslave import Config, connect_db
from mbslave.search import fetch_all

cfg = Config(os.path.join(os.path.dirname(__file__), 'mbslave.conf'))
db = connect_db(cfg)

# Write the full index as a single Solr <add> document on stdout.
print '<add>'
for doc in fetch_all(cfg, db):
    print ET.tostring(doc)
print '</add>'
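# The resulting <add> document can be POSTed to Solr's /update handler (for
# example with curl) to rebuild the index from scratch.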

#!/usr/bin/env python

import os
import shutil
import tempfile
import urllib2
from mbslave import Config, connect_db
# ReplicationHook and StatusReport are referenced below, but their imports are
# missing from this excerpt.


def download_packet(base_url, replication_seq):
    # Fetch a single replication packet; a 404 simply means that no newer
    # packet has been published yet.
    url = base_url + "/replication-%d.tar.bz2" % replication_seq
    print "Downloading", url
    try:
        data = urllib2.urlopen(url)
    except urllib2.HTTPError, e:
        if e.code == 404:
            return None
        raise
    tmp = tempfile.NamedTemporaryFile(suffix='.tar.bz2')
    shutil.copyfileobj(data, tmp)
    data.close()
    tmp.seek(0)
    return tmp

config = Config(os.path.join(os.path.dirname(__file__), 'mbslave.conf'))
db = connect_db(config)

base_url = config.get('MUSICBRAINZ', 'base_url')
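# Tables listed under "ignore" in the TABLES section are skipped during
# replication.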
ignored_tables = set(config.get('TABLES', 'ignore').split(','))

# Use the Solr-aware hook when search indexing is enabled, so that changes
# applied from replication packets are also reflected in the search index;
# otherwise fall back to the plain ReplicationHook.
if config.solr.enabled:
    from mbslave.search import SolrReplicationHook
    hook_class = SolrReplicationHook
else:
    hook_class = ReplicationHook

# replication_control records which schema version and which replication
# packet this mirror is currently at.
cursor = db.cursor()
cursor.execute("SELECT current_schema_sequence, current_replication_sequence FROM %s.replication_control" % config.schema.name('musicbrainz'))
schema_seq, replication_seq = cursor.fetchone()

status = StatusReport(schema_seq, replication_seq)
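# schema_seq and replication_seq drive the rest of the sync: packets newer
# than replication_seq are fetched from base_url and applied in order.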
#!/usr/bin/env python

# Variant of the sync script above; the target schema is read directly from
# the DATABASE section of the configuration file.

import os
import shutil
import tempfile
import urllib2
from mbslave import Config, connect_db


def download_packet(base_url, replication_seq):
    url = base_url + "/replication-%d.tar.bz2" % replication_seq
    print "Downloading", url
    try:
        data = urllib2.urlopen(url)
    except urllib2.HTTPError, e:
        if e.code == 404:
            return None
        raise
    tmp = tempfile.NamedTemporaryFile(suffix='.tar.bz2')
    shutil.copyfileobj(data, tmp)
    data.close()
    tmp.seek(0)
    return tmp

config = Config(os.path.join(os.path.dirname(__file__), 'mbslave.conf'))
db = connect_db(config)

schema = config.get('DATABASE', 'schema')
base_url = config.get('MUSICBRAINZ', 'base_url')
ignored_tables = set(config.get('TABLES', 'ignore').split(','))

if config.solr.enabled:
    from mbslave.search import SolrReplicationHook
    hook_class = SolrReplicationHook
else:
    hook_class = ReplicationHook

cursor = db.cursor()
cursor.execute("SELECT current_schema_sequence, current_replication_sequence FROM %s.replication_control" % schema)
schema_seq, replication_seq = cursor.fetchone()
            print " - Ignoring", name
            continue
        if not check_table_exists(db, schema, table):
            print " - Skipping %s (table %s does not exist)" % (name, fulltable)
            continue
        cursor.execute("SELECT 1 FROM %s LIMIT 1" % fulltable)
        if cursor.fetchone():
            print " - Skipping %s (table %s already contains data)" % (name, fulltable)
            continue
        print " - Loading %s to %s" % (name, fulltable)
        cursor.copy_from(tar.extractfile(member), fulltable)
        db.commit()


config = Config(os.path.join(os.path.dirname(__file__), 'mbslave.conf'))
db = connect_db(config)

ignored_tables = set(config.get('TABLES', 'ignore').split(','))
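# Import every dump tarball given on the command line.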
for filename in sys.argv[1:]:
    load_tar(filename, db, config, ignored_tables)


########NEW FILE########
__FILENAME__ = mbslave-psql
#!/usr/bin/env python

import os
from optparse import OptionParser
from mbslave import Config, connect_db

parser = OptionParser()