Example #1
 def __init__(self):
     self.cnfg = u.get_config(cfg='/Users/aliciadetelich/as_tools_config.yml')
     self.header_row, self.csvfile = u.opencsv(self.cnfg['input_csv'])
     with open(self.cnfg['input_csv'], encoding='utf-8') as infile:
         self.rowcount = sum(1 for line in infile) - 1
     self.api_url = 'http://api.snaccooperative.org/'
     self.headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
     self.q = {'command': 'read', 'sameas': 'lcnaf_uri'}
Example #2
def run_db_query():
    config_file = utilities.get_config()
    as_db = dbssh.DBConn(config_file=config_file)
    with open('get_unstructured_dates.sql', 'r', encoding='utf-8') as open_query:
        query_data = as_db.run_query(open_query.read())
    q = query_data.values.tolist()
    return q
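A note on get_config: the helper these examples rely on is not shown in the listing. Below is a minimal sketch, assuming the config is a YAML file parsed into a dict (the cfg keyword and the as_tools_config.yml filename are taken from the calls above):

import yaml
from pathlib import Path

def get_config(cfg=str(Path.home()) + '/as_tools_config.yml'):
    # Hypothetical implementation: parse the YAML config file into a dict.
    with open(cfg, 'r', encoding='utf-8') as config_file:
        return yaml.safe_load(config_file)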
Example #3
def main():
	home_dir = str(Path.home())
	config_file = u.get_config(cfg=home_dir + '\\config.yml')
	api_url, headers = u.login(url=config_file['api_url'], username=config_file['api_username'], password=config_file['api_password'])
	print(f'Connected to {api_url}')
	output_file = config_file['api_output_csv']
	print('Welcome to the location barcode lookup tool. Scan barcodes below. Enter QUIT to exit.')
	done = False
	x = 0
	while not done:
		x += 1
		barcode = input('Location barcode: ')
		if barcode == 'QUIT':
			break
		else:
			location_uri, data = search_locations(api_url, headers, barcode)
			#will need to change this again so header row is not repeated
			write_output(data, x, output_file)
			print('Would you like to add containers to this location? Enter Y or N.')
			decision = input('Y/N: ')
			if decision == 'Y':
				#this pulls the location URI for the barcode
				barcode_list = process_barcodes(api_url, headers, location_uri)
				search_and_update(api_url, headers, barcode_list, location_uri)
			elif decision == 'N':
				pass
			print('Would you like to remove containers from this location? Enter Y or N.')
			decision_2 = input('Y/N: ')
			if decision_2 == 'Y':
				unassociate_containers(api_url, headers, data)
			elif decision_2 == 'N':
				pass
Example #4
 def __init__(self, *sesh):
     self.config_file = u.get_config(cfg=str(Path.home()) +
                                     '/as_tools_config.yml')
     self.api_url = self.config_file['api_url']
     self.username = self.config_file['api_username']
     self.password = self.config_file['api_password']
     if not sesh:
         self.sesh = as_session(api_url=self.api_url,
                                username=self.username,
                                password=self.password)
     else:
         self.sesh = sesh
     self.schemas = self.get_schemas()
     self.all_enumerations = self.get_dynamic_enums()
     self.schema_exclusions = [
         line.strip('\n') for line in open('fixtures/schema_exclusions.csv',
                                           encoding='utf-8')
     ]
     self.property_exclusions = [
         line.strip('\n')
         for line in open('fixtures/property_exclusions.csv',
                          encoding='utf-8')
     ]
     self.jsonmodel_pattern = re.compile(
         r'(JSONModel)(\(:.*?\)\s)(uri|object|uri_or_object)')
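A note on as_session: the login helper used above is not part of the listing. A minimal sketch, assuming it authenticates against the ArchivesSpace backend API and returns a requests.Session carrying the session token:

import requests

def as_session(api_url, username, password):
    # Hypothetical helper: log in to the ArchivesSpace API and attach the
    # returned session token to every subsequent request on the session.
    sesh = requests.Session()
    response = sesh.post(f'{api_url}/users/{username}/login', params={'password': password})
    response.raise_for_status()
    sesh.headers.update({'X-ArchivesSpace-Session': response.json()['session']})
    return sesh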
Example #5
def config_file_helper(config_type, value):
    #update a single key in the YAML config and write the result out as JSON
    cfg = u.get_config(cfg='as_tools_config.yml')
    cfg[config_type] = value
    with open('data/config.json', 'w', encoding='utf-8') as config_file:
        json.dump(cfg, config_file)
    return value
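A possible call to the helper above, assuming the config contains an api_url key (the URL shown is only illustrative):

new_url = config_file_helper('api_url', 'https://aspace.example.edu/api')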
Example #6
def run_client():
    utilities.config = utilities.get_config()
    config = utilities.config
    utilities.client = TelegramClient("sessions_bot",
                                      config["api_id"],
                                      config["api_hash"],
                                      loop=loop)
    utilities.client.start()
    utilities.load_plugins()
    utilities.plugins.sort(key=sort_key)
    utilities.public_plugins.sort(key=sort_key)
Example #7
async def run(message, matches, chat_id, step, crons=None):
    upd = ""
    for line in runGitPull():
        upd = upd + line.decode("utf-8")
    if "Already" in upd:
        return [message.reply("❏︙تم عاده تشغيل البوت وتم تحديث السورس")]
    else:
        utilities.config = utilities.get_config()
        utilities.config["updateChat"] = message.chat_id
        utilities.save_config()
        await message.reply("❏︙جاري تحديث السورس رجاء الانتضار منفضلك")
        restartBot()
    return []
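A note on runGitPull: the helper is not included in the listing. A minimal sketch, assuming it runs git pull and yields its output line by line as bytes (which is why each line is decoded above):

import subprocess

def runGitPull():
    # Hypothetical helper: stream the output of `git pull` as raw bytes.
    process = subprocess.Popen(['git', 'pull'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    for line in iter(process.stdout.readline, b''):
        yield line
    process.stdout.close()
    process.wait()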
Example #8
def main():
    home_dir = str(Path.home())
    config_file = u.get_config(cfg=home_dir + '/config.yml')
    api_url, headers = u.login(url=config_file['api_url'],
                               username=config_file['api_username'],
                               password=config_file['api_password'])
    output_file = config_file['api_output_csv']
    #want to be able to do this more than once - ORRR just have this as a command line argument like the dupe detect
    if len(sys.argv) > 1:
        barcodes = sys.argv[1:]
        for i, barcode in enumerate(barcodes):
            data = search_locations(api_url, headers, barcode)
            #need to fix this so that if there's already a header row don't create another one
            write_output(data, i, output_file)
Example #9
 def __init__(self):
     '''Need to test what happens if I change a value in the config file...'''
     self.config_file = u.get_config(cfg='as_tools_config.yml')
     self.api_url = self.config_file['api_url']
     self.username = self.config_file['api_username']
     self.password = self.config_file['api_password']
     #is this what I want?
     self.dirpath = u.setdirectory(self.config_file['backup_directory'])
     #this can be changed, the csvdict function will need to be called again
     self.csvfile = u.opencsvdict(self.config_file['input_csv'])
     self.sesh, self.sesh_file = as_session(self.api_url, self.username,
                                            self.password)
     self.json_data = ASJsonData()
     self.crud = ASCrud(self.config_file, self.sesh)
Example #10
async def run(message, matches, chat_id, step, crons=None):
    upd = ""
    for line in runGitPull():
        upd = upd + line.decode("utf-8")
    if "Already" in upd:
        return [message.reply("The source is up to date.")]
    else:
        utilities.config = utilities.get_config()
        utilities.config["updateChat"] = message.chat_id
        utilities.save_config()
        await message.reply(
            "The source has been updated, the bot will restart. Please wait."
        )
        restartBot()
    return []
Example #11
    def __init__(self, sesh=None):
        self.config_file = utes.get_config(cfg='config.yml')
        self.api_url = self.config_file['api_url']
        self.username = self.config_file['api_username']
        self.password = self.config_file['api_password']
        #is this what I want?
        self.dirpath = utes.setdirectory(self.config_file['backup_directory'])
        #this can be changed, the csvdict function will need to be called again
        self.csvfile = utes.opencsvdict(self.config_file['input_csv'])
        self.ead_3_transformation = self.config_file['ead_3_transformation']
        self.ead_3_schema_path = self.config_file['ead_3_schema']
        self.manifest_path = self.config_file['manifest_path']
        self.transformation_errors = utes.openoutfile(self.config_file['transformation_errors'])
        self.validation_errors = utes.openoutfile(self.config_file['validation_errors'])
        self.saxon_path = self.config_file['saxon_path']
        #self.ead_3_transformation = requests.get("https://raw.githubusercontent.com/YaleArchivesSpace/EAD3-to-PDF-UA/master/xslt-to-update-the-ASpace-export/yale.aspace_v2_to_yale_ead3.xsl").text
        self.ead_3_schema = self.prep_schema_for_validation()
        if sesh is None:
            self.sesh = as_session(api_url=self.api_url, username=self.username, password=self.password)
        else:
            self.sesh = sesh

    def log_subprocess_output(self, pipe):
        for line in iter(pipe.readline, b''):
            logging.warning(line)

    def prep_schema_for_validation(self):
        ead_3_schema_doc = etree.parse(self.ead_3_schema_path)
        return etree.XMLSchema(ead_3_schema_doc)

    def export_ead(self, row, ead3=True, get_ead=None):
        '''Exports EAD files using a list of resource IDs as input.

           Parameters:
            row['resource_id']: The ID of the resource
            row['repo_id']: The ID of the repository

           Returns:
            str: A string representation of the EAD response from the ArchivesSpace API.
        '''
        repo_id = row['repo_id']
        resource_id = row['resource_id']
        print(f'Exporting {resource_id}')
        if ead3:
            get_ead = self.sesh.get(f"{self.api_url}/repositories/{repo_id}/resource_descriptions/{resource_id.strip()}.xml?include_unpublished=true&ead3=true", stream=True).text
        else:
            get_ead = self.sesh.get(f"{self.api_url}/repositories/{repo_id}/resource_descriptions/{resource_id.strip()}.xml?include_unpublished=true", stream=True).text
        print(f'{resource_id} exported. Writing to file.')
        ead_file_path = f"{self.dirpath}/{resource_id}.xml"
        with open(ead_file_path, 'a', encoding='utf-8') as outfile:
            outfile.write(get_ead)
        print(f'{resource_id} written to file: {ead_file_path}')
        return ead_file_path

    def transform_ead_3(self, ead_file_path):
        '''Transforms EAD files using a user-defined XSLT file.'''
        print(f'Transforming file: {ead_file_path}')
        subprocess.run(["java", "-cp", f"{self.saxon_path}", "net.sf.saxon.Transform",
                        f"-s:{ead_file_path}",
                        f"-xsl:{self.ead_3_transformation}",
                        f"-o:{ead_file_path[:-4]}_out.xml"], stdout=self.transformation_errors, stderr=subprocess.STDOUT,
                       encoding='utf-8')
        #if proc.stderr:
         #   self.log_subprocess_output(proc.stderr)
        #this doesn't mean that it was successful...
        print(f'Transformation finished: {ead_file_path}')
        return f"{ead_file_path[:-4]}_out.xml"
        #return open(f"{ead_file_path[:-4]}_out.xml", 'r', encoding='utf-8').read()

    def validate_ead_3(self, ead_file_path):
        print(f'Validating file: {ead_file_path}')
        try:
            #print(type(ead_file_path))
            with open(ead_file_path, 'r', encoding='utf-8') as open_ead:
                doc = etree.parse(open_ead)
                try:
                    self.ead_3_schema.assertValid(doc)
                    #self.validation_errors.write(f'{ead_file_path} is valid')
                    logging.warning(f'\n\n{ead_file_path} is valid')
                except etree.DocumentInvalid as err:
                    #self.validation_errors.write(f'Schema Validation Error: {ead_file_path}')
                    #self.validation_errors.write(traceback.format_exc())
                    #self.validation_errors.write(err.error_log)
                    logging.warning(f'\n\nSchema Validation Error: {ead_file_path}')
                    #logging.exception('Error: ')
                    logging.warning(err.error_log)
                except Exception:
                    #self.validation_errors.write(f'Other validation error: {ead_file_path}')
                    logging.warning(f'\n\nOther validation error: {ead_file_path}')
                    logging.exception('Error: ')
                    #self.validation_errors.write(traceback.format_exc())
        #this finds a problem with the file
        except IOError:
            #self.validation_errors.write(f'Invalid file: {ead_file_path}')
            #self.validation_errors.write(traceback.format_exc())
            logging.warning(f'\n\nInvalid file: {ead_file_path}')
            logging.exception('Error: ')
        #this finds syntax errors in XML
        except etree.XMLSyntaxError as err:
            #self.validation_errors.write(f'XML Syntax Error: {ead_file_path}')
            #self.validation_errors.write(traceback.format_exc())
            #self.validation_errors.write(err.error_log)
            logging.warning(f'\n\nXML Syntax Error: {ead_file_path}')
            logging.warning(err.error_log)
            logging.exception('Error: ')
        except Exception:
            #self.validation_errors.write(f'Other validation error: {ead_file_path}')
            #self.validation_errors.write(traceback.format_exc())
            logging.warning(f'\n\nOther validation error: {ead_file_path}')
            logging.exception('Error: ')
        print(f'Validation complete: {ead_file_path}')


    def export_transform_validate_ead3(self, row):
        '''Runs export, transform, and validate EAD functions using a user-defined schema file.'''
        ead_file_path = self.export_ead(row)
        transformed_ead_path = self.transform_ead_3(ead_file_path)
        self.validate_ead_3(transformed_ead_path)


if __name__ == "__main__":
    error_log()
    ead_utes = EADUtils()
    logging.warning(f'''Starting logging for EAD export.
                        API URL: {ead_utes.api_url}
                        CSV Input: {ead_utes.config_file['input_csv']}
                    ''')
    for row in ead_utes.csvfile:
        #instead of worrying about real time logging could just do a tqdm bar. The only problem is if it aborts somehow
        ead_utes.export_transform_validate_ead3(row)



'''Standalone script for EAD3 transformations.'''
Example #12
 def __init__(self):
     self.config_file = u.get_config(cfg='as_tools_config.yml')
     self.dirpath = u.setdirectory(self.config_file['backup_directory'])
     self.csvfile = u.opencsvdict(self.config_file['input_csv'])
     self.dbconn = dbssh.DBConn(config_file=self.config_file)
     self.query_data = ASQueries()
Example #13
#local imports
from queries import ASQueries
from json_data import ASJsonData
from crud import ASCrud
from templates import ASTemplates
from aspace_run import ASpaceRun, as_session
import app_renderers as renderers
from app_layouts import serve_layout
import app_dropdown_values as dv

import aspace_tools_logging as atl

logger = atl.logging.getLogger(__name__)

config_file = u.get_config(cfg=str(Path.home()) + '/as_tools_config.yml')

app = dash.Dash(__name__)
app.config.suppress_callback_exceptions = True
app.layout = serve_layout

#############################
#### Query App Callbacks ####
#############################

#CAN'T I JUST PUT THE DIVIDER IN THE INPUT DIV???


@app.callback(Output('input_div', 'children'),
              [Input('selected_query_dropdown', 'value')],
              [State('query_type_dropdown', 'value')])