def ss_attach_list_of_image_files(access_token, sheet_name,
                                  conditional_field_name,
                                  conditional_field_value, attachList):
    """Attach image files to every row of a sheet whose given column
    matches a value.

    :param access_token: Smartsheet API access token
    :param sheet_name: name of the sheet to locate via search
    :param conditional_field_name: column title used for matching
    :param conditional_field_value: cell value that marks target rows
    :param attachList: iterable of image file paths (.png, else JPEG)
    """
    if settings.YH_SS_PRODUCTION_SITE:
        # Production runs behind proxy.server:3128.
        proxies = {'https': 'https://proxy.server:3128'}
        ss = smartsheet.Smartsheet(proxies=proxies, access_token=access_token)
    else:
        # Development: just provide the access token.
        ss = smartsheet.Smartsheet(access_token)

    # Resolve the sheet id from the sheet name via search.
    search_results = ss.Search.search(sheet_name).results
    sheet_id = next(result.object_id for result in search_results
                    if result.object_type == 'sheet')

    # Build column-name <-> column-id lookup maps.
    all_columns = ss.Sheets.get_columns(sheet_id, include_all=True)
    colMap = {}
    colMapRev = {}
    for col in all_columns.data:
        colMap[col.title] = col.id
        colMapRev[col.id] = col.title

    # Restrict the returned dataset to the conditional column only.
    col_ids = [colMap.get(conditional_field_name)]

    # Fetch the sheet and convert to plain JSON for easy traversal.
    MySheet = ss.Sheets.get_sheet(sheet_id, column_ids=col_ids)
    MySheetjson = json.loads(str(MySheet))

    for MyRow in MySheetjson["rows"]:
        for MyCell in MyRow["cells"]:
            if colMapRev.get(MyCell.get(
                    "columnId")) == conditional_field_name and MyCell.get(
                        "value") == conditional_field_value:
                for attach_file in attachList:
                    # Anything that is not .png is treated as JPEG.
                    if Path(attach_file).suffix == '.png':
                        mime_type = 'image/png'
                    else:
                        mime_type = 'image/jpeg'

                    # Context manager so the handle is always closed
                    # (the original leaked one open file per attachment).
                    with open(attach_file, 'rb') as fh:
                        ss.Attachments.attach_file_to_row(
                            sheet_id,
                            MyRow["id"],
                            (os.path.basename(attach_file), fh, mime_type))
    return
Beispiel #2
0
 def test_update_sheet(self, smart_setup):
     """Renaming a sheet as a user without permission must yield an Error."""
     client = smartsheet.Smartsheet()
     client.assume_user(smart_setup['users']['moe'].email)
     renamed = client.models.Sheet({'name': 'You knuckleheads!'})
     result = client.Sheets.update_sheet(smart_setup['sheet'].id, renamed)
     assert isinstance(result, client.models.error.Error)
Beispiel #3
0
def new_tally(project_name, token):
    """Copy the overview template sheet into the target workspace,
    naming the copy '<project_name> Overview'."""
    import smartsheet
    import logging

    # Client setup: surface API errors as exceptions and log every call.
    ss_client = smartsheet.Smartsheet(token)
    ss_client.errors_as_exceptions(True)
    logging.basicConfig(filename='rwsheet.log', level=logging.INFO)

    # Copy the overview template (sheet 6257571175655300) into workspace
    # 2148322879268740, carrying the template's data along.
    destination = smartsheet.models.ContainerDestination({
        'destination_type': 'workspace',
        'destination_id': 2148322879268740,
        'new_name': project_name + ' Overview'
    })
    ss_client.Sheets.copy_sheet(6257571175655300, destination,
                                include=['data'])

    print('done')
def export(sheet_id, export_type, export_format='csv', export_to=''):
    r"""
    A slightly more elegant way to export a file from Smartsheet.
        * get_sheet_as_ does not support Gantt chart export
        * for Excel format, get_sheet_as and get_report_as only support xlsx output
    TODO: prevent overwriting files with the same name
    :param sheet_id:        int, required; sheet ID
    :param export_type:     str, required; 'sheet' or 'report' to call get_sheet_* or get_report_*
    :param export_format:   str, optional; 'csv', 'xlsx', or (sheet only) 'pdf' (default 'csv')
    :param export_to:       str, optional; destination directory (ex: C:\foo\bar\ );
                            defaults to this module's directory
    :return:                none
    """
    ss = smartsheet.Smartsheet(access_token)  # initialize client
    ss.errors_as_exceptions(True)             # raise on API errors

    # Fall back to this module's directory when no destination is given.
    export_path = _dir if export_to == '' else export_to

    sheet_name = ss.Sheets.get_sheet(sheet_id).name

    if export_type == 'sheet':      # calls get_sheet_as_*
        user_msg = export_sheet(ss, sheet_id, export_format, export_path,
                                sheet_name)
    elif export_type == 'report':   # calls get_report_as_*
        user_msg = export_report(ss, sheet_id, export_format, export_path,
                                 sheet_name)
    else:
        user_msg = 'export_type not valid. Please use \'sheet\' or \'report\'.'

    print(user_msg)
Beispiel #5
0
def new_project_status_row(project, token):
    """Insert a row for *project* at the top of the project-status sheet,
    hyperlinking the project-number cell to the project's own sheet."""
    import smartsheet
    import logging

    STATUS_SHEET_ID = 2762627773425540

    # Client setup: errors as exceptions, log all calls.
    ss_client = smartsheet.Smartsheet(token)
    ss_client.errors_as_exceptions(True)
    logging.basicConfig(filename='rwsheet.log', level=logging.INFO)

    # Map sheet names to ids across the account.
    sheets = ss_client.Sheets.list_sheets(include_all=True).data
    map_of_sheets = {s.name: s.id for s in sheets}

    # Row map of the project sheet (row number -> row id).
    project_sheet = ss_client.Sheets.get_sheet(map_of_sheets[project])
    project_rows = {r.row_number: r.id for r in project_sheet.rows}

    # Column map of the project status sheet (title -> column id).
    status_sheet = ss_client.Sheets.get_sheet(STATUS_SHEET_ID)
    status_columns = {c.title: c.id for c in status_sheet.columns}

    # Build the new top row: project number (hyperlinked), name, status.
    row_a = ss_client.models.Row()
    row_a.to_top = True
    row_a.cells.append({
        'column_id': status_columns['Project #'],
        'value': project[0:6],
        'hyperlink': {'sheetId': map_of_sheets[project]}
    })
    row_a.cells.append({
        'column_id': status_columns['Project Name'],
        'value': project[7:]
    })
    row_a.cells.append({
        'column_id': status_columns['Status'],
        'value': 'Open'
    })

    # Append the row to the project status sheet.
    ss_client.Sheets.add_rows(STATUS_SHEET_ID, [row_a])

    print('done')
def create_sheet(sheet_name):
    """Create a Smartsheet whose columns mirror the povbot.tblPovs table."""
    ss = smartsheet.Smartsheet(ss_config['SS_TOKEN'])

    # Pull the column metadata straight from the database table.
    cols = db.engine.execute("SHOW COLUMNS FROM povbot.tblPovs ")

    new_col_list = []
    primary_col = True  # Smartsheet requires exactly one primary column.

    for col in cols:
        # Map each DB column to a Smartsheet column type:
        # datetime -> DATE, 'deleted' -> Yes/No PICKLIST, else TEXT_NUMBER.
        if col.Type == 'datetime':
            spec = {'title': col.Field, 'primary': primary_col, 'type': 'DATE'}
        elif col.Field == 'deleted':
            print ('i found deleted')
            spec = {'title': col.Field, 'primary': primary_col,
                    'type': 'PICKLIST', 'options': ['Yes', 'No']}
        else:
            spec = {'title': col.Field, 'primary': primary_col,
                    'type': 'TEXT_NUMBER'}
        new_col_list.append(spec)
        primary_col = False  # only the first column is primary

    sheet_spec = ss.models.Sheet({'name': sheet_name, 'columns': new_col_list})
    response = ss.Home.create_sheet(sheet_spec)
Beispiel #7
0
def dataFrame_Creation(mylist):
    """Merge a list of phone numbers with the Smartsheet call-analysis data.

    :param mylist: list of phone-number strings (may contain spaces)
    :return: DataFrame from an outer merge of the sheet data (on
        'Telephone Number_Centrix') and the cleaned numbers ('phoneList');
        outer join keeps rows from both sides whether they matched or not
    """
    access_token = ''  # temp access token for smartsheets
    smartsheet_client = smartsheet.Smartsheet(access_token)
    # NOTE(review): get_sheet() is called without a sheet id, which raises
    # TypeError at runtime -- the sheet number still needs to be supplied.
    phone_call_analysis = simple_sheet_to_dataframe(
        smartsheet_client.Sheets.get_sheet(
        ))
    # Strip spaces from every number (was a manual append loop).
    newlist = [number.replace(' ', '') for number in mylist]
    newFrame = pd.DataFrame(newlist)
    newFrame['phoneList'] = newlist
    outcome = phone_call_analysis.merge(
        newFrame,
        left_on='Telephone Number_Centrix',
        right_on='phoneList',
        how='outer')
    return outcome
Beispiel #8
0
def sort_project_status(token):
    """Sort the project status sheet by Exp Finish, Schedule Status and
    Project Manager, all ascending.

    Fix: the original packed all three columns into ONE SortCriterion
    dict literal, so the duplicate 'column_id'/'direction' keys silently
    collapsed to the last pair and the sheet was sorted on Project
    Manager only. Each column now gets its own SortCriterion.
    """
    import smartsheet
    import logging

    # Client setup: errors as exceptions, log all calls.
    ss_client = smartsheet.Smartsheet(token)
    ss_client.errors_as_exceptions(True)
    logging.basicConfig(filename='rwsheet.log', level=logging.INFO)

    sort_specifier = smartsheet.models.SortSpecifier({
        'sort_criteria': [
            smartsheet.models.SortCriterion({
                'column_id': 7164751437424516,  # Exp Finish
                'direction': 'ASCENDING'
            }),
            smartsheet.models.SortCriterion({
                'column_id': 4957636463486852,  # Schedule Status
                'direction': 'ASCENDING'
            }),
            smartsheet.models.SortCriterion({
                'column_id': 5049912694990724,  # Project Manager
                'direction': 'ASCENDING'
            }),
        ]
    })
    sheet = ss_client.Sheets.sort_sheet(2762627773425540, sort_specifier)

    print('done')
 def test_new_and_shared_sheet(self, smart_setup):
     """Create a sheet, share it to Moe as ADMIN, then verify that Moe
     still cannot rename it (error code 1004)."""
     client = smartsheet.Smartsheet()
     sheet_spec = client.models.Sheet({
         'name': 'pytest_social_sheet',
         'columns': [{
             'title': 'Primary',
             'primary': True,
             'type': 'TEXT_NUMBER'
         }]
     })
     action = client.Folders.create_sheet_in_folder(
         smart_setup['folder'].id, sheet_spec)
     assert action.message == 'SUCCESS'
     created = action.result
     # Share the new sheet to Moe with ADMIN access.
     share_spec = client.models.Share({
         'accessLevel': 'ADMIN',
         'email': smart_setup['users']['moe'].email
     })
     action = created.share(share_spec)
     assert action.message == 'SUCCESS'
     share = action.result
     # Now attempt the rename as Moe.
     client.assume_user(smart_setup['users']['moe'].email)
     action = created.update_name('You knuckleheads!')
     assert action.message == 'ERROR'
     assert isinstance(action, client.models.error.Error)
     assert action.result.code == 1004
Beispiel #10
0
def push_xls_to_ss(my_file, ss_name, run_dir=app_cfg['UPDATES_DIR']):
    """Import an Excel file as a new Smartsheet named *ss_name*, then move
    it into the configured workspace."""
    path_to_run_dir = os.path.join(app_cfg['HOME'], app_cfg['WORKING_DIR'],
                                   run_dir, my_file)
    print('PUSHING to ', path_to_run_dir, ' to SmartSheets>>>>>> ', ss_name)

    ss = smartsheet.Smartsheet(passwords['SS_TOKEN'])

    # Import the Excel file; row 0 holds the column headers.
    response = ss.Sheets.import_xlsx_sheet(
        path_to_run_dir,
        ss_name,
        header_row_index=0)
    my_sheet_id = response.to_dict()['data']['id']

    # Move the freshly imported sheet into the configured workspace.
    ws_info = ss_get_ws(ss, app_cfg['SS_WORKSPACE'])
    ss_move_sheet(ss, my_sheet_id, ws_info['id'])

    return
def config_and_connect(env):
    """Read the environment's config file and open both the SQL Server
    connection and a Smartsheet client.

    :param env: 'TEST' or 'PROD'
    :return: tuple (smartsheet client, access token, connection, cursor)
    :raises ValueError: for any other env value (the original fell
        through to an obscure NameError on `conn` instead)
    """
    config = ConfigParser.ConfigParser()
    if env == 'TEST':
        # Test DB uses a trusted (Windows) connection.
        config.read(['config/pctest.cfg'])
        conn = pyodbc.connect('DRIVER={};SERVER={};DATABASE={};trusted_connection={}'.format(
            config.get('sql_server', 'driver'),
            config.get('sql_server', 'server'),
            config.get('sql_server', 'database'),
            config.get('sql_server', 'connection')
            )
        )
    elif env == 'PROD':
        # Production DB authenticates with explicit credentials.
        config.read(['config/production.cfg'])
        conn = pyodbc.connect('DRIVER={};SERVER={};PORT={};DATABASE={};UID={};PWD={}'.format(
            config.get('sql_server', 'driver'),
            config.get('sql_server', 'server'),
            config.get('sql_server', 'port'),
            config.get('sql_server', 'database'),
            config.get('sql_server', 'user'),
            config.get('sql_server', 'password')
            )
        )
    else:
        raise ValueError("env must be 'TEST' or 'PROD', got %r" % (env,))
    cursor = conn.cursor()
    token = config.get('smartsheet', 'access_token')
    ss = smartsheet.Smartsheet(token)
    return ss, token, conn, cursor
def smt_data_extraction():
    """Download the Maricopa inventory Smartsheet and return it as a
    DataFrame restricted to the columns of interest.

    :return: pandas DataFrame with only the `need_col` columns
    """
    # NOTE(review): hard-coded API token and sheet id should move to config.
    xMytoken = 'a0rcziibs8bzzchmpqwgyusd4n'
    SHEET_ID = '8485507110856580'
    xSheet = smartsheet.Smartsheet(xMytoken)

    tt = xSheet.Sheets.get_sheet(SHEET_ID)

    def simple_sheet_to_dataframe(sheet):
        # Flatten the sheet: one list of cell values per row, with
        # column titles as the DataFrame header.
        col_names = [col.title for col in sheet.columns]
        rows = [[cell.value for cell in row.cells] for row in sheet.rows]
        return pd.DataFrame(rows, columns=col_names)

    sm_data = simple_sheet_to_dataframe(tt)

    # Fix: removed a dead `datt_only = datt[:10]` line that referenced an
    # undefined name (NameError at runtime) plus unused path/filename locals.
    need_col = [
        'PROJECT', 'CATEGORY', 'ITEM/ SKU', 'DESCRIPTION', 'SOURCE', 'SHIPPER',
        'CASE COUNT', '# INNER CASE CTN', '# PER INNER CTN', 'TOTAL QTY',
        'RECEIVED DATE', 'BALANCE', 'CUSTOMER NOTES', 'qty out',
        'full pallet out', 'RELEASE DATE', 'Workorder Nbr', 'Release to cust:'
    ]
    return sm_data[need_col]
def sheet_details(sheet_name):
    """Look up a sheet by name and return a dict of its metadata: id, URL,
    row count, modified time (US/Eastern) and per-column name/id pairs.
    'sheet_id' is the string 'Not Found' when no sheet matches."""
    ss = smartsheet.Smartsheet(ss_config['SS_TOKEN'])
    sheet_dict = {}

    # Scan every sheet for an exact name match (last match wins).
    for sheet in ss.Sheets.list_sheets(include_all=True).data:
        if sheet.name == sheet_name:
            sheet_dict['sheet_name'] = sheet.name
            sheet_dict['sheet_id'] = sheet.id

    if 'sheet_id' not in sheet_dict:
        sheet_dict['sheet_id'] = "Not Found"
        return sheet_dict

    # Found it: pull the full sheet for row count, URL and columns.
    sheet = ss.Sheets.get_sheet(sheet_dict['sheet_id'])
    sheet_dict['row_count'] = sheet.total_row_count
    sheet_dict['sheet_url'] = sheet.permalink
    sheet_dict['modified_at'] = sheet.modified_at.astimezone(
        pytz.timezone('US/Eastern'))

    # Record each column's title and id, numbered from 1.
    for col_cnt, column in enumerate(sheet.columns, start=1):
        sheet_dict['col_name_' + str(col_cnt)] = column.title
        sheet_dict['col_id_' + str(col_cnt)] = column.id
        sheet_dict['col_count'] = col_cnt

    return sheet_dict
Beispiel #14
0
def image_upload(API_Token, Sheet_ID, Start_Row,
                 image_path):  # Function for uploading images to Smartsheet
    """Attach every file in *image_path* to consecutive rows of a sheet,
    starting at row number *Start_Row*.

    :param API_Token: Smartsheet API token
    :param Sheet_ID: target sheet id (int or numeric string)
    :param Start_Row: 1-based row number the first attachment goes to
    :param image_path: directory whose entries are uploaded in listing order
    """
    smart = smartsheet.Smartsheet(API_Token)
    sheet = smart.Sheets.get_sheet(int(Sheet_ID))

    # Every entry in the directory is treated as a file to upload.
    files = list(os.listdir(image_path))

    for x, name in enumerate(files):
        # PySimpleGUI progress meter, one tick per file.
        sg.OneLineProgressMeter('Progress', x + 1, len(files), 'key')
        sleep(5)  # stay under the Smartsheet API rate limit
        # Fix: context manager so the handle is closed after upload (the
        # original leaked one open file object per attachment), and
        # os.path.join instead of '//' string concatenation.
        with open(os.path.join(str(image_path), str(name)), 'rb') as fh:
            smart.Attachments.attach_file_to_row(
                Sheet_ID,
                # Walk the sheet's rows in order starting from Start_Row.
                sheet.rows[int(Start_Row) + x - 1].id_,
                # Mime type kept from the original; the service appears to
                # ignore it for these uploads.
                (str(name), fh, 'application/msword'))
def get_tasks(sheet_id):
    """Fetch a sheet and return [[parent_row_id, row_id, task_name], ...]
    based on the 'Task Name' column.

    Exits the process (status 1) when the sheet cannot be fetched.
    :param sheet_id: Smartsheet sheet id
    """
    ss = smartsheet.Smartsheet(smartsheet_token)
    ss.errors_as_exceptions(True)
    try:
        raw_sheet = ss.Sheets.get_sheet(sheet_id)
    except Exception as e:
        # Fix: was a bare `except:` that swallowed the actual error (and
        # even SystemExit/KeyboardInterrupt); surface the cause first.
        print("Something is wrong...")
        print(e)
        sys.exit(1)

    sheet = raw_sheet.to_dict()
    # Column id of the 'Task Name' column.
    title_column_id = next(c['id'] for c in sheet['columns']
                           if c['title'] == 'Task Name')
    row_tasks = []
    for r in sheet['rows']:
        task = next(c['displayValue'] for c in r['cells']
                    if c['columnId'] == title_column_id)
        # Top-level rows carry no parentId.
        par_id = r.get('parentId')
        row_tasks.append([par_id, r['id'], task])

    return row_tasks
def add_rows(sheet_dict):
    """Append one Smartsheet row per tblPovs record, mapping each sheet
    column name to the same-named attribute of the POV record.

    :param sheet_dict: output of sheet_details(): 'sheet_id', 'col_count'
        plus numbered 'col_id_N' / 'col_name_N' entries
    """
    ss = smartsheet.Smartsheet(ss_config['SS_TOKEN'])

    sheet_id = sheet_dict['sheet_id']
    col_count = sheet_dict['col_count']

    # All POV records, ordered by company name.
    povs = ta_povs.query.order_by(ta_povs.company_name).all()

    for pov in povs:
        row_next = ss.models.Row()
        row_next.to_top = True

        for x in range(1, col_count + 1):
            col_id = sheet_dict['col_id_' + str(x)]
            col_name = sheet_dict['col_name_' + str(x)]
            # Fix: getattr instead of eval("pov."+col_name) -- identical
            # attribute lookup without executing arbitrary strings as code.
            row_value = getattr(pov, col_name)

            # Smartsheet rejects None values; send an empty string instead.
            if row_value is None:
                row_value = ""

            # Datetimes must be serialized to strings.
            if isinstance(row_value, datetime):
                row_value = row_value.strftime("%A, %d. %B %Y %I:%M%p")

            row_next.cells.append(
                {'column_id': col_id, 'value': row_value, 'strict': False})

        response = ss.Sheets.add_rows(sheet_id, [row_next])
def delete_sheet(sheet_dict):
    """Delete the sheet referenced by sheet_dict['sheet_id'], unless the
    earlier lookup failed (id is the sentinel 'Not Found')."""
    ss = smartsheet.Smartsheet(ss_config['SS_TOKEN'])

    sheet_id = sheet_dict['sheet_id']
    if sheet_id != 'Not Found':
        ss.Sheets.delete_sheet(sheet_id)
Beispiel #18
0
def clear_sheet(access_token, sheet_id):
    """
    Removes all cell data from a given Smartsheet Sheet, while preserving columns, etc., by gathering a list of row
    IDs, then deleting those rows. Could probably be done without utilizing Requests - Requests in and Smartsheet
    out seems wonky - but I believe it's cleanest/shortest path to the solution.
    :param access_token:    str, required; Smartsheet api token
    :param sheet_id:        int, required; sheet ID
    :return:                none
    """
    url = 'https://api.smartsheet.com/2.0/sheets/' + str(sheet_id)
    header = {  # header for requests
        'Authorization': 'Bearer ' + access_token,
        'Content-Type': 'application/json',
        'cache-control': 'no-cache'
    }

    ss = smartsheet.Smartsheet(access_token)  # initialize Smartsheet client
    # GET the sheet via raw REST to read the row ids.
    del_tbl = requests.get(url, headers=header).json()

    # Fix: the original looped with enumerate() only to re-index the same
    # list; a comprehension collects the row ids directly.
    del_row_ids = [row['id'] for row in del_tbl['rows']]

    ss.Sheets.delete_rows(sheet_id, del_row_ids)  # delete all rows on sheet
Beispiel #19
0
def create_overview_columns(project, token):
    """Populate the '<project> Overview' sheet with one row per department,
    each carrying MIN/MAX cross-sheet formulas over that department's
    Start/Finish ranges on the project sheet.

    :param project: name of the project sheet (the overview sheet is
        expected to be named '<project> Overview')
    :param token: Smartsheet API access token

    NOTE(review): this function writes into `project_columns`, which is not
    defined here -- presumably a module-level dict also read by
    make_start_reference/make_finish_reference; confirm it exists, otherwise
    this raises NameError.
    """
    ss_client = smartsheet.Smartsheet(token)
    # make sure we don't miss any errors
    ss_client.errors_as_exceptions(True)
    # log all calls
    logging.basicConfig(filename='rwsheet.log', level=logging.INFO)
    # build sheet map (name -> id) from every sheet in the account
    response = ss_client.Sheets.list_sheets(include_all=True)
    sheets = response.data
    map_of_sheets = {}
    overview_columns = {}
    departments = [
        'Shipping', 'Sculpt', 'Paint', 'Metal', 'Install', 'Fab', 'Design',
        'CNC'
    ]

    for sheet in sheets:
        map_of_sheets[sheet.name] = sheet.id

    # get source (project) sheet
    sheet = ss_client.Sheets.get_sheet(map_of_sheets[project])

    # create column map for source sheet (title -> id)
    for column in sheet.columns:
        project_columns[column.title] = column.id

    # get overview sheet; `sheet` is rebound, so sheet.id below refers to
    # the overview sheet from here on
    sheet = ss_client.Sheets.get_sheet(map_of_sheets[project + ' Overview'])

    # create column map for overview sheet (title -> id)
    for column in sheet.columns:
        overview_columns[column.title] = column.id

    for department in departments:
        # Create the cross-sheet Start/Finish ranges referenced by the
        # MIN/MAX formulas in the row appended below.
        make_start_reference(department, map_of_sheets[project], sheet.id,
                             token)
        make_finish_reference(department, map_of_sheets[project], sheet.id,
                              token)

        row_a = ss_client.models.Row()
        row_a.to_top = True
        row_a.cells.append({
            'column_id': overview_columns['Project Name'],
            # strip the trailing ' Overview' (9 characters) from the name
            'value': sheet.name[0:len(sheet.name) - 9]
        })
        row_a.cells.append({
            'column_id': overview_columns['Resource'],
            'value': department
        })
        row_a.cells.append({
            'column_id': overview_columns['Start'],
            'formula': '=MIN({' + department + ' Start Range})'
        })
        row_a.cells.append({
            'column_id': overview_columns['Finish'],
            'formula': '=MAX({' + department + ' Finish Range})'
        })
        response = ss_client.Sheets.add_rows(sheet.id, [row_a])

    print('done')
Beispiel #20
0
 def __init__(self, config: Mapping[str, Any]):
     """Cache connection details and a ready-to-use Smartsheet client.

     :param config: mapping with 'spreadsheet_id' and 'access_token' keys
     """
     self._spreadsheet_id = config["spreadsheet_id"]
     self._access_token = config["access_token"]
     client = smartsheet.Smartsheet(self._access_token)
     client.errors_as_exceptions(True)
     # Each access to `Sheets` builds a new object, so keep one bound
     # reference to get_sheet rather than going through the property again.
     self._get_sheet = client.Sheets.get_sheet
     self._data = None
Beispiel #21
0
def importSmartsheetCsv(sheetName, fileName):
    """Import a CSV file as a brand-new sheet named *sheetName*; the first
    row of the file supplies the column headers."""
    client = smartsheet.Smartsheet(access_token)
    client.errors_as_exceptions(True)

    client.Sheets.import_csv_sheet(fileName, sheetName, header_row_index=0)
Beispiel #22
0
    def get_conn(self):
        """Authenticate with the Smartsheet API.

        Returns:
            Smartsheet -- an API session bound to this object's token.
        """
        session = smartsheet.Smartsheet(self.token)
        return session
Beispiel #23
0
def main(event, context):
    """Lambda entry point: dump a Smartsheet sheet to JSON and upload it
    to an S3 bucket.

    :param event: Lambda event payload (unused)
    :param context: Lambda context (unused)
    """
    # initiate the connection to S3
    s3 = boto3.client("s3")

    # Initialize the Smartsheet client; fail loudly on API errors.
    smartsheet_client = smartsheet.Smartsheet(os.environ['SmartSheetToken'])
    smartsheet_client.errors_as_exceptions(True)

    SHEET_ID = os.environ['SheetID']

    # Column titles, in sheet order.
    action = smartsheet_client.Sheets.get_columns(SHEET_ID, include_all=True)
    cols = [col.title for col in action.data]

    # One list of cell values per row; falsy cells (None, 0, '') become ''.
    sheet_object = smartsheet_client.Sheets.get_sheet(SHEET_ID)
    values = [[cell.value if cell.value else '' for cell in row.cells]
              for row in sheet_object.rows]

    # Pair column titles with each row's values.
    col_val = [dict(zip(cols, val)) for val in values]
    print(col_val[0])

    # Convert to JSON, then to bytes for the S3 body.
    json_data = json.dumps(col_val, indent=2)
    encoded = bytes(json_data.encode('utf-8'))

    # Fix: removed a gzip.compress() result that was computed but never
    # used -- the object written below is (and was) the uncompressed JSON.
    print('Prepare to upload data into the S3 bucket ....')

    # Provide target S3 bucket name and output file name
    bucket = '<your bucket name>'
    #bucket = 'ssdp-s3-csai-dsde-dev'
    filename = 'test_smartsheet' + '.json'

    # Write into the S3 bucket
    s3.put_object(Bucket = bucket, Key = filename, Body = encoded)
    print('Your file will land into the bucket', bucket, 'soon!!!')
Beispiel #24
0
def ss_append_data(access_token, sheet_name, append_data):
    """Append one row to the named sheet from a list of
    {column-name: value} dicts.

    :param access_token: Smartsheet API access token
    :param sheet_name: name of the target sheet, located via search
    :param append_data: iterable of dicts mapping column titles to values;
        all entries are flattened into a single appended bottom row
    """
    if settings.YH_SS_PRODUCTION_SITE:
        # Production runs behind proxy.server:3128.
        proxies = {'https': 'https://proxy.server:3128'}
        ss = smartsheet.Smartsheet(proxies=proxies, access_token=access_token)
    else:
        # Development: just provide the access token.
        ss = smartsheet.Smartsheet(access_token)

    # Resolve the sheet id from the sheet name via search.
    search_results = ss.Search.search(sheet_name).results
    sheet_id = next(result.object_id for result in search_results
                    if result.object_type == 'sheet')

    # Map column titles to column ids. (Fix: the reverse id->title map the
    # original also built was never used and has been dropped.)
    all_columns = ss.Sheets.get_columns(sheet_id, include_all=True)
    colMap = {col.title: col.id for col in all_columns.data}

    # Build the new bottom row from every key of every data element.
    new_row = smartsheet.models.Row()
    new_row.to_bottom = True
    for data_element in append_data:
        for key in data_element:
            new_row.cells.append({
                'column_id': colMap.get(key),
                'value': data_element[key]
            })

    # Add the assembled row to the sheet.
    response = ss.Sheets.add_rows(
        sheet_id,
        [new_row])

    return
Beispiel #25
0
    def __init__(self):
        """Constructor - initialize API access using ACCESS_TOKEN."""
        try:
            self.smart_sheets = smartsheet.Smartsheet(ACCESS_TOKEN)
        except Exception:
            # Record the full traceback, then bail out of the process.
            logging.error(traceback.format_exc())
            sys.exit(1)

        self.smart_sheets.errors_as_exceptions(True)
Beispiel #26
0
def smartsheetGetSheets(filter):
    """Print the id and name of every sheet whose name contains *filter*,
    case-insensitively. (NOTE: the parameter shadows the builtin filter();
    kept as-is since renaming it would change the call interface.)"""
    client = smartsheet.Smartsheet(access_token)
    client.errors_as_exceptions(True)

    needle = filter.lower()
    for sheet in client.Sheets.list_sheets(include_all=True).data:
        if needle in sheet.name.lower():
            print("ID: %s - NAME: %s" % (sheet.id, sheet.name))
Beispiel #27
0
def make_pm_row(source_row, source_pm, token):
    """Build (without sending) a Row that overwrites the PM cell of
    *source_row* with *source_pm*.

    :param source_row: existing smartsheet Row whose id is reused
    :param source_pm: new value for the PM column
    :param token: unused; kept for signature compatibility (the original
        constructed a Smartsheet client from it that was never used)
    :return: smartsheet.models.Row ready to pass to update_rows
    """
    new_cell = smartsheet.models.Cell()
    new_cell.column_id = column_map['PM']  # presumably a module-level map
    new_cell.value = source_pm
    new_cell.strict = False

    new_row = smartsheet.models.Row()
    new_row.id = source_row.id
    new_row.cells.append(new_cell)

    return new_row
Beispiel #28
0
    def read(self, logger: AirbyteLogger, config: json,
             catalog: ConfiguredAirbyteCatalog,
             state: Dict[str, any]) -> Generator[AirbyteMessage, None, None]:
        """Yield one RECORD AirbyteMessage per row of the configured
        Smartsheet spreadsheet, for every stream in the catalog.

        :param logger: Airbyte logger
        :param config: dict with 'access_token' and 'spreadsheet_id'
        :param catalog: streams to sync; column names come from each
            stream's JSON schema `properties`
        :param state: unused
        """
        access_token = config["access_token"]
        spreadsheet_id = config["spreadsheet_id"]
        smartsheet_client = smartsheet.Smartsheet(access_token)

        for configured_stream in catalog.streams:
            stream = configured_stream.stream
            properties = stream.json_schema["properties"]
            if isinstance(properties, list):
                columns = tuple(key for dct in properties
                                for key in dct.keys())
            elif isinstance(properties, dict):
                columns = tuple(properties.keys())
            else:
                logger.error(
                    "Could not read properties from the JSONschema in this stream"
                )
                # Fix: skip this stream instead of falling through to a
                # NameError on the undefined `columns` below.
                continue
            name = stream.name

            try:
                sheet = smartsheet_client.Sheets.get_sheet(spreadsheet_id)
                sheet = json.loads(str(sheet))  # make it subscriptable
                logger.info(f"Starting syncing spreadsheet {sheet['name']}")
                logger.info(f"Row count: {sheet['totalRowCount']}")

                for row in sheet["rows"]:
                    # Cells without a value become empty strings.
                    values = tuple(i["value"] if "value" in i else ""
                                   for i in row["cells"])
                    try:
                        data = dict(zip(columns, values))

                        yield AirbyteMessage(
                            type=Type.RECORD,
                            record=AirbyteRecordMessage(
                                stream=name,
                                data=data,
                                emitted_at=int(datetime.now().timestamp()) *
                                1000),
                        )
                    except Exception as e:
                        logger.error(
                            f"Unable to encode row into an AirbyteMessage with the following error: {e}"
                        )

            except Exception as e:
                logger.error(f"Could not read smartsheet: {name}")
                raise e
        logger.info(f"Finished syncing spreadsheet with ID: {spreadsheet_id}")
Beispiel #29
0
def downloadSmartsheet():
    """Download the configured Smartsheet as an Excel file into
    MAIN_DIR/input_files."""
    downloadDir = globalVars.MAIN_DIR + "/input_files"

    print('Attempting to download Smartsheet as Excel File...')

    # Client authenticated with the bearer token from credentials.
    ss_client = smartsheet.Smartsheet(credentials.smartsheet_api_key)

    # Fix: removed an unused sheetID local and a commented-out
    # get_sheet() call that served no purpose.
    ss_client.Sheets.get_sheet_as_excel(credentials.smartsheet_id, downloadDir)

    print("Downloaded to " + downloadDir)
Beispiel #30
0
def make_start_reference(dept, source, destination, token):
    """Create a cross-sheet reference named '<dept> Start Range' on the
    destination sheet, spanning the department's Start column of the
    source sheet."""
    ss_client = smartsheet.Smartsheet(token)
    start_col = project_columns[dept + ' Start']  # module-level column map
    xref = ss_client.models.CrossSheetReference({
        'name': dept + ' Start Range',
        'source_sheet_id': int(source),
        'start_column_id': start_col,
        'end_column_id': start_col
    })

    result = ss_client.Sheets.create_cross_sheet_reference(destination, xref)