Example #1
    def check_for_conflicts(self, files):
        local_store = self.get_local_store()
        retrieve_result = self.project.get_retrieve_result({"files": files})
        properties = retrieve_result.fileProperties
        for f in files:
            ext = util.get_file_extension_no_period(f)
            apex_type = util.get_meta_type_by_suffix(ext)
            apex_entity_api_name = util.get_file_name_no_extension(f)
            body_field = 'Body'
            if apex_type['xmlName'] == 'ApexPage' or apex_type[
                    'xmlName'] == 'ApexComponent':
                body_field = 'Markup'
            api_name_plus_extension = apex_entity_api_name + "." + ext

            server_property = None
            for p in properties:
                if p["fullName"] == apex_entity_api_name:
                    server_property = p
                    try:
                        config.api_name_to_id_dict[p["fullName"]] = p["id"]
                    except:
                        pass
                    break
            if api_name_plus_extension in local_store and server_property != None:
                local_store_entry = local_store[api_name_plus_extension]
                local_last_modified_date = local_store_entry[
                    "lastModifiedDate"]
                server_last_modified_date = server_property['lastModifiedDate']
                last_modified_name = server_property['lastModifiedByName']
                qr = self.project.sfdc_client.execute_query(
                    "Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'"
                    .format(body_field, apex_type['xmlName'],
                            apex_entity_api_name))
                # lets use the soap endpoint here to help the folks being affected by their proxy refusing REST requests bc of Authorization header
                # https://github.com/joeferraro/MavensMate-SublimeText/issues/315#issuecomment-35996112
                # try:
                #     qr = self.project.sfdc_client.query("Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'".format(body_field, apex_type['xmlName'], apex_entity_api_name))
                # except:
                #     qr = self.project.sfdc_client.execute_query("Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'".format(body_field, apex_type['xmlName'], apex_entity_api_name))
                body = qr['records'][0][body_field]
                body = body.encode('utf-8')
                if str(local_last_modified_date) != str(
                        server_last_modified_date
                ) or local_store_entry['mmState'] == 'dirty':
                    if local_store_entry['mmState'] != 'dirty':
                        local_store_entry['mmState'] = 'dirty'
                    msg = util.generate_request_for_action_response(
                        "The local version of your file and the server copy are out of sync.\n\n{0} was last modified by {1} on {2}."
                        .format(apex_entity_api_name, last_modified_name,
                                server_last_modified_date),
                        'compile', ["Diff With Server", "Operation Canceled"],
                        tmp_file_path=util.put_tmp_file_on_disk(
                            apex_entity_api_name, body,
                            apex_type.get('suffix', '')))
                    self.mark_dirty(api_name_plus_extension)
                    return True, msg
        return False, None
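
check_for_conflicts() returns a (has_conflict, msg) tuple; Example #8 below shows how MavensMate's compile command consumes it. A minimal, self-contained sketch of that calling pattern follows (the FakeConflictManager stub is hypothetical and only stands in for the real class, which queries the org through self.project.sfdc_client):

# Minimal sketch of how a caller consumes check_for_conflicts().
# FakeConflictManager is a hypothetical stub; the real implementation is the
# method above.
class FakeConflictManager(object):
    def check_for_conflicts(self, files):
        # Pretend the first file is out of sync with the server.
        if files:
            return True, {"body": files[0] + " was modified on the server"}
        return False, None

def compile_files(conflict_manager, files, action=None):
    # Mirrors the guard in Example #8: the check is skipped when the user
    # already chose to overwrite the server copy.
    if action != 'overwrite':
        has_conflict, msg = conflict_manager.check_for_conflicts(files)
        if has_conflict:
            return msg          # surface the prompt instead of compiling
    return {"success": True}    # the real compile/deploy would happen here

print(compile_files(FakeConflictManager(), ['MyClass.cls']))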
Example #2
File: misc.py  Project: DavidCDean/mm
 def execute(self):
     file_name = self.params["file_name"]
     extension = util.get_file_extension_no_period(file_name)
     mtype = util.get_meta_type_by_suffix(extension)
     full_file_path = os.path.join(config.project.location, "src", mtype["directoryName"], file_name)
     params = {
         "project_name"  : config.project.project_name,
         "file_name"     : full_file_path,
         "line_number"   : self.params.get("line_number", 0)
     } 
     config.connection.run_subl_command("open_file_in_project", json.dumps(params))
     return util.generate_success_response("ok")
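
For reference, the payload serialized into run_subl_command above ends up shaped like the following (the paths and values here are hypothetical, for illustration only):

# Hypothetical example of the JSON handed to
# run_subl_command("open_file_in_project", ...); values are illustrative only.
import json

params = {
    "project_name": "my-project",
    "file_name": "/path/to/workspace/my-project/src/classes/MyClass.cls",
    "line_number": 42,
}
print(json.dumps(params, indent=2))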
Example #3
File: misc.py  Project: z00162835/mm
 def execute(self):
     file_name = self.params["file_name"]
     extension = util.get_file_extension_no_period(file_name)
     mtype = util.get_meta_type_by_suffix(extension)
     full_file_path = os.path.join(config.project.location, "src",
                                   mtype["directoryName"], file_name)
     params = {
         "project_name": config.project.project_name,
         "file_name": full_file_path,
         "line_number": self.params.get("line_number", 0)
     }
     config.connection.run_subl_command("open_file_in_project",
                                        json.dumps(params))
     return util.generate_success_response("ok")
Example #4
    def sync_with_server(self, files):
        local_store = self.get_local_store()
        retrieve_result = self.project.get_retrieve_result({"files": files})
        properties = retrieve_result.fileProperties
        for f in files:
            debug('\n\n\n\nFILE BRO ----->')

            ext = util.get_file_extension_no_period(f)
            apex_type = util.get_meta_type_by_suffix(ext)
            apex_entity_api_name = util.get_file_name_no_extension(f)
            body_field = 'Body'
            if apex_type['xmlName'] == 'ApexPage' or apex_type[
                    'xmlName'] == 'ApexComponent':
                body_field = 'Markup'
            api_name_plus_extension = apex_entity_api_name + "." + ext

            server_property = None
            for p in properties:
                if p["fullName"] == apex_entity_api_name:
                    server_property = p
                    try:
                        config.api_name_to_id_dict[p["fullName"]] = p["id"]
                    except:
                        pass
                    break

            debug(api_name_plus_extension)

            if api_name_plus_extension in local_store and server_property != None:
                local_store_entry = local_store[api_name_plus_extension]
                local_last_modified_date = local_store_entry[
                    "lastModifiedDate"]
                server_last_modified_date = server_property['lastModifiedDate']
                last_modified_name = server_property['lastModifiedByName']

                qr = self.project.sfdc_client.execute_query(
                    "Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'"
                    .format(body_field, apex_type['xmlName'],
                            apex_entity_api_name))
                body = qr['records'][0][body_field]
                body = body.encode('utf-8')

                msg = util.generate_sync_response(
                    "Server version was last modified by {0} on {1}.".format(
                        last_modified_name, server_last_modified_date),
                    tmp_file_path=util.put_tmp_file_on_disk(
                        apex_entity_api_name, body,
                        apex_type.get('suffix', '')))
                return msg
        return None
Example #5
 def check_for_conflicts(self, files):
     local_store = self.get_local_store()
     retrieve_result = self.project.get_retrieve_result({"files":files})
     properties = retrieve_result.fileProperties
     for f in files:
         ext = util.get_file_extension_no_period(f)
         apex_type = util.get_meta_type_by_suffix(ext)
         apex_entity_api_name = util.get_file_name_no_extension(f)
         body_field = 'Body'
         if apex_type['xmlName'] == 'ApexPage' or apex_type['xmlName'] == 'ApexComponent':
             body_field = 'Markup'
         api_name_plus_extension = apex_entity_api_name+"."+ext
         
         server_property = None
         for p in properties:
             if p["fullName"] == apex_entity_api_name:
                 server_property = p
                 try:
                     config.api_name_to_id_dict[p["fullName"]] = p["id"]
                 except:
                     pass
                 break
         if api_name_plus_extension in local_store and server_property != None:
             local_store_entry = local_store[api_name_plus_extension]
             local_last_modified_date = local_store_entry["lastModifiedDate"]
             server_last_modified_date = server_property['lastModifiedDate']
             last_modified_name = server_property['lastModifiedByName']
             qr = self.project.sfdc_client.execute_query("Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'".format(body_field, apex_type['xmlName'], apex_entity_api_name))
             # lets use the soap endpoint here to help the folks being affected by their proxy refusing REST requests bc of Authorization header
             # https://github.com/joeferraro/MavensMate-SublimeText/issues/315#issuecomment-35996112
             # try:
             #     qr = self.project.sfdc_client.query("Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'".format(body_field, apex_type['xmlName'], apex_entity_api_name))
             # except:
             #     qr = self.project.sfdc_client.execute_query("Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'".format(body_field, apex_type['xmlName'], apex_entity_api_name))
             body = qr['records'][0][body_field]
             body = body.encode('utf-8')
             if str(local_last_modified_date) != str(server_last_modified_date) or local_store_entry['mmState'] == 'dirty':
                 if local_store_entry['mmState'] != 'dirty':
                     local_store_entry['mmState'] = 'dirty'
                 msg = util.generate_request_for_action_response(
                     "The local version of your file and the server copy are out of sync.\n\n{0} was last modified by {1} on {2}."
                     .format(apex_entity_api_name, last_modified_name, server_last_modified_date),
                     'compile',
                     ["Diff With Server","Operation Canceled"],
                     tmp_file_path=util.put_tmp_file_on_disk(apex_entity_api_name, body, apex_type.get('suffix', ''))
                 )
                 self.mark_dirty(api_name_plus_extension)
                 return True, msg
     return False, None
Example #6
    def sync_with_server(self, files):
        local_store = self.get_local_store()
        retrieve_result = self.project.get_retrieve_result({"files":files})
        properties = retrieve_result.fileProperties
        for f in files:
            debug('\n\n\n\nFILE BRO ----->')

            ext = util.get_file_extension_no_period(f)
            apex_type = util.get_meta_type_by_suffix(ext)
            apex_entity_api_name = util.get_file_name_no_extension(f)
            body_field = 'Body'
            if apex_type['xmlName'] == 'ApexPage' or apex_type['xmlName'] == 'ApexComponent':
                body_field = 'Markup'
            api_name_plus_extension = apex_entity_api_name+"."+ext
            
            server_property = None
            for p in properties:
                if p["fullName"] == apex_entity_api_name:
                    server_property = p
                    try:
                        config.api_name_to_id_dict[p["fullName"]] = p["id"]
                    except:
                        pass
                    break
            
            debug(api_name_plus_extension)

            if api_name_plus_extension in local_store and server_property != None:
                local_store_entry = local_store[api_name_plus_extension]
                local_last_modified_date = local_store_entry["lastModifiedDate"]
                server_last_modified_date = server_property['lastModifiedDate']
                last_modified_name = server_property['lastModifiedByName']
                
                qr = self.project.sfdc_client.execute_query("Select LastModifiedById, LastModifiedDate, LastModifiedBy.Name, {0} From {1} Where Name = '{2}'".format(body_field, apex_type['xmlName'], apex_entity_api_name))
                body = qr['records'][0][body_field]
                body = body.encode('utf-8')

                msg = util.generate_sync_response(
                    "Server version was last modified by {0} on {1}.".format(last_modified_name, server_last_modified_date),
                    tmp_file_path=util.put_tmp_file_on_disk(apex_entity_api_name, body, apex_type.get('suffix', ''))
                )
                return msg
        return None
Example #7
    def execute(self):
        project = config.project
        sfdc_client = config.sfdc_client
        files = self.params.get('files', None)
        for f in files:
            if '-meta.xml' in f:
                corresponding_file = f.split('-meta.xml')[0]
                if corresponding_file not in files:
                    files.append(corresponding_file)
        for f in files:
            if '-meta.xml' in f:
                continue
            file_ext = f.split('.')[-1]
            metadata_type = util.get_meta_type_by_suffix(file_ext)
            if metadata_type['metaFile'] == True:
                corresponding_file = f + '-meta.xml'
                if corresponding_file not in files:
                    files.append(corresponding_file)

        metadata_package_dict = util.get_metadata_hash(files)
        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        package_xml = util.get_package_xml_contents(metadata_package_dict)
        util.put_package_xml_in_directory(tmp_unpackaged, package_xml, True)
        empty_package_xml = util.get_empty_package_xml_contents()
        util.put_empty_package_xml_in_directory(tmp_unpackaged, empty_package_xml)
        zip_file = util.zip_directory(tmp, tmp)
        
        purge_on_delete_setting = config.connection.get_plugin_client_setting("mm_purge_on_delete", False);
        if purge_on_delete_setting:
            describe_result = config.sfdc_client.describeMetadata(retXml=False)
            if describe_result.testRequired == True:
                purge_on_delete_setting = False

        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True,
            "purge_on_delete"   : purge_on_delete_setting
        }
        delete_result = sfdc_client.delete(deploy_params)
        d = xmltodict.parse(delete_result,postprocessor=util.xmltodict_postprocessor)
        shutil.rmtree(tmp)
        result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
        if result['success'] == True:
            removed = []
            for f in files:
                try:
                    file_ext = f.split('.')[-1]
                    metadata_type = util.get_meta_type_by_suffix(file_ext)
                    if metadata_type == None or not 'directoryName' in metadata_type:
                        continue;
                    directory = metadata_type['directoryName']
                    filepath = os.path.join(project.location, "src", directory, f)
                    metapath = os.path.join(project.location, "src", directory, f + '-meta.xml')
                    os.remove(filepath)
                    os.remove(metapath)
                    # remove the entry in file properties
                    project.conflict_manager.remove_from_local_store(f)
                    removed.append(f)
                except Exception, e:
                    print e.message
            return util.generate_success_response("Removed metadata files: " + (",".join(removed)))
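
Examples #7 through #9 parse the SOAP delete/deploy response by walking the dict that xmltodict produces. A small standalone illustration of that lookup path, using a made-up minimal response body rather than a real Salesforce payload:

# Standalone illustration of the xmltodict lookup used above.
# The XML below is a fabricated, minimal checkDeployStatusResponse; it only
# shows why the nested dictionary keys look the way they do.
import xmltodict

fake_response = """
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
  <soapenv:Body>
    <checkDeployStatusResponse>
      <result>
        <success>true</success>
        <id>0Af000000000001</id>
      </result>
    </checkDeployStatusResponse>
  </soapenv:Body>
</soapenv:Envelope>
"""

d = xmltodict.parse(fake_response)
result = d["soapenv:Envelope"]["soapenv:Body"]["checkDeployStatusResponse"]["result"]
print(result["success"], result["id"])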
Example #8
    def execute(self):        
        project = config.project

        files = self.params.get('files', None)
        use_tooling_api = config.connection.get_plugin_client_setting('mm_compile_with_tooling_api', False)
        check_for_conflicts = config.connection.get_plugin_client_setting('mm_compile_check_conflicts', False)

        compiling_apex_metadata = True
        for f in files:
            if f.split('.')[-1] not in util.TOOLING_API_EXTENSIONS:
                #cannot use tooling api
                compiling_apex_metadata = False
                break

        #when compiling apex metadata, check to see if it is newer on the server
        if check_for_conflicts and compiling_apex_metadata:
            if 'action' not in self.params or self.params['action'] != 'overwrite':
                has_conflict, msg = config.project.conflict_manager.check_for_conflicts(files)
                if has_conflict:
                    return msg
     
        #use tooling api here, if possible
        if use_tooling_api == True and compiling_apex_metadata and int(float(util.SFDC_API_VERSION)) >= 27:
            if 'metadata_container' not in project.settings or project.settings['metadata_container'] == None:
                container_id = project.sfdc_client.get_metadata_container_id()
                new_settings = project.settings
                new_settings['metadata_container'] = container_id
                project.put_settings_file(new_settings)
            else:
                container_id = project.settings['metadata_container']
            
            file_ext = files[0].split('.')[-1]
            try:
                result = project.sfdc_client.compile_with_tooling_api(files, container_id)
            except MetadataContainerException as e:
                project.sfdc_client.delete_mavensmate_metadatacontainers_for_this_user()
                response = project.sfdc_client.new_metadatacontainer_for_this_user()
                project.update_setting("metadata_container",response["id"])
                #return CompileSelectedMetadataCommand(params=self.params,args=self.args).execute()
                #ensure only a single retry
                result = project.sfdc_client.compile_with_tooling_api(files, response["id"])

            if 'Id' in result and 'State' in result:
                if result['State'] == 'Completed':
                    project.conflict_manager.refresh_local_store(files=files)
                return util.generate_response(result)

        #the user has either chosen not to use the tooling api, or it's non apex metadata
        else:
            try:
                for f in files:
                    if '-meta.xml' in f:
                        corresponding_file = f.split('-meta.xml')[0]
                        if corresponding_file not in files:
                            files.append(corresponding_file)
                for f in files:
                    if '-meta.xml' in f:
                        continue
                    file_ext = f.split('.')[-1]
                    metadata_type = util.get_meta_type_by_suffix(file_ext)
                    if metadata_type == None:
                        if sys.platform == "win32":
                            dir_parts = f.split("\\")
                        else:
                            dir_parts = f.split("/")
                        if 'documents' in dir_parts:
                            metadata_type = util.get_meta_type_by_name("Document") 
                    if metadata_type != None and 'metaFile' in metadata_type and metadata_type['metaFile'] == True:
                        corresponding_file = f + '-meta.xml'
                        if corresponding_file not in files:
                            files.append(corresponding_file)

                metadata_package_dict = util.get_metadata_hash(files)
                #debug(metadata_package_dict)
                tmp = util.put_tmp_directory_on_disk()
                os.makedirs(os.path.join(tmp,"unpackaged"))
                #copy files from project directory to tmp
                for full_file_path in files:
                    if 'package.xml' in full_file_path:
                        continue
                    if config.is_windows: 
                        destination = os.path.join(tmp,'unpackaged',full_file_path.split('\src\\')[1])
                    else:
                        destination = os.path.join(tmp,'unpackaged',full_file_path.split('/src/')[1])
                    destination_directory = os.path.dirname(destination)
                    if not os.path.exists(destination_directory):
                        os.makedirs(destination_directory)
                    shutil.copy2(full_file_path, destination_directory)

                package_xml = util.get_package_xml_contents(metadata_package_dict)
                util.put_package_xml_in_directory(os.path.join(tmp,"unpackaged"), package_xml)
                zip_file = util.zip_directory(tmp, tmp)
                deploy_params = {
                    "zip_file"          : zip_file,
                    "rollback_on_error" : True,
                    "ret_xml"           : True
                }
                deploy_result = project.sfdc_client.deploy(deploy_params)

                d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
                result = d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result']
                shutil.rmtree(tmp)

                # Get new properties for the files we just compiled
                if result['success'] == True:
                    project.conflict_manager.refresh_local_store(files=files)

                return json.dumps(result)

            except Exception, e:
                try:
                    shutil.rmtree(tmp)
                except:
                    pass
                return util.generate_error_response(e.message)
Example #9
    def execute(self):
        project = config.project
        sfdc_client = config.sfdc_client

        metadata_type                   = self.params.get('metadata_type', None)
        github_template                 = self.params.get('github_template', None)
        params                          = self.params.get('params', None)

        if params == None:
            raise MMException('The payload to create metadata has recently changed. If you are using Sublime Text, you likely need to update your MavensMate plugin to 3.4.8+')

        if "api_name" not in params or params["api_name"] == None:
            return util.generate_error_response("You must provide a name for the new metadata.")

        api_name = params.get('api_name')

        if sfdc_client.does_metadata_exist(object_type=metadata_type, name=api_name) == True:
            mt = util.get_meta_type_by_name(metadata_type)
            filepath = os.path.join(project.location, 'src', mt['directoryName'], api_name+'.'+mt['suffix'])
            fetched = ""
            if not os.path.exists(filepath):
                self.params['files'] = [filepath]
                RefreshSelectedMetadataCommand(params=self.params,args=self.args).execute()
                fetched = ", fetched metadata file from server"
            raise MMException("This API name is already in use in your org" + fetched + ".")      

        tmp, tmp_unpackaged = util.put_tmp_directory_on_disk(True)
        
        util.put_skeleton_files_on_disk(metadata_type, tmp_unpackaged, github_template, params)
        package_xml_body = util.get_package_xml_contents({metadata_type : [ api_name ]})
        util.put_package_xml_in_directory(tmp_unpackaged, package_xml_body)
        zip_file = util.zip_directory(tmp, tmp)
        deploy_params = {
            "zip_file"          : zip_file,
            "rollback_on_error" : True,
            "ret_xml"           : True
        }
        deploy_result = sfdc_client.deploy(deploy_params)
        d = xmltodict.parse(deploy_result,postprocessor=util.xmltodict_postprocessor)
        meta_dir = ""
        files = []
        path = None
        for dirname, dirnames, filenames in os.walk(tmp_unpackaged):
            for filename in filenames:
                if 'package.xml' in filename:
                    continue
                full_file_path = os.path.join(dirname, filename)
                if '-meta.xml' in filename:
                    extension = filename.replace('-meta.xml','').split(".")[-1]
                else:
                    extension = filename.split(".")[-1]
                mt = util.get_meta_type_by_suffix(extension)
                if mt != None: 
                    meta_dir = mt['directoryName']
                    path = os.path.join(project.location, 'src', meta_dir)
                    if not os.path.exists(path):
                        os.makedirs(path)
                    files.append(os.path.join(path, filename))
                elif extension != "xml":
                    continue;
                # only apex files and meta.xml files should make it to here
                shutil.copy(full_file_path, path)
        shutil.rmtree(tmp)
        
        project.update_package_xml_with_metadata(metadata_type, api_name)
        project.conflict_manager.refresh_local_store(files=files)

        return json.dumps(d["soapenv:Envelope"]["soapenv:Body"]['checkDeployStatusResponse']['result'])
Example #10
    def execute(self):
        archive_deployments = config.connection.get_plugin_client_setting("mm_archive_deployments", True)
        finish_deploy = self.params.get('finish', False)
        compare = config.connection.get_plugin_client_setting("mm_compare_before_deployment", True)
        destinations = self.params['destinations']
        deploy_metadata = config.sfdc_client.retrieve(package=self.params['package'])
        deploy_name = self.params.get('new_deployment_name', None)
        threads = []
        
        if not finish_deploy and compare:
            source_retrieve_result = config.sfdc_client.retrieve(package=self.params['package'])
            debug('source_retrieve_result')
            debug(source_retrieve_result)

            source_dict = {}
            for fp in source_retrieve_result.fileProperties:
                source_dict[fp.fileName] = fp

            debug('source_dict')
            debug(source_dict) 

            #need to compare package.xml to destination orgs here
            for destination in destinations:
                thread = CompareHandler(config.project, destination, self.params, self.params['package'])
                threads.append(thread)
                thread.start()  
                
            compare_results = []
            for thread in threads:
                thread.join()  
                compare_results.append(thread.result)
            
            debug('compare_results')
            debug(compare_results)
            destination_dict = {}

            for cr in compare_results:
                if 'success' in cr and cr['success'] == False:
                    destination_dict[cr['username']] = cr
                else:
                    cr_dict = {}
                    for fpfp in cr.fileProperties:
                        cr_dict[fpfp.fileName] = fpfp
                    destination_dict[cr.username] = cr_dict

            debug('destination_dict')
            debug(destination_dict)    

            final_compare_result = {}
            for d in destinations:
                final_compare_result[d['username']] = {}

            for file_name, file_details in source_dict.iteritems():
                if 'package.xml' in file_name:
                    continue; 
                for username, username_value in destination_dict.iteritems():
                    if 'success' in username_value and username_value['success'] == False:
                        final_compare_result[username] = username_value
                    else:
                        destination_retrieve_details = destination_dict[username]
                        
                        if 'package.xml' in file_name:
                            continue

                        short_file_name = file_name.split('/')[-1]
                        mtype = util.get_meta_type_by_suffix(short_file_name.split('.')[-1])
       
                        if file_name not in destination_retrieve_details:
                            final_compare_result[username][short_file_name] = {
                                'name' : short_file_name,
                                'type' : mtype['xmlName'],
                                'action': 'insert',
                                'message' : 'Create'
                            }
                        else:
                            destination_file_detail = destination_retrieve_details[file_name]
                            source_file_detail = source_dict[file_name]
                            if source_file_detail.lastModifiedDate >= destination_file_detail.lastModifiedDate:
                                final_compare_result[username][file_name] = {
                                    'name' : short_file_name,
                                    'type' : mtype['xmlName'],
                                    'action' : 'update',
                                    'message' : 'You will overwrite this file'
                                }
                            else:
                                final_compare_result[username][file_name] = {
                                    'name' : short_file_name,
                                    'type' : mtype['xmlName'],
                                    'action' : 'update_conflict',
                                    'message' : 'Destination file is newer than source file'
                                }
            
            debug('final_compare_result')
            debug(final_compare_result) 

            if self.args.respond_with_html == True:
                html = util.generate_html_response('deploy_compare', final_compare_result, self.params)
                response = util.generate_success_response(html, "html") # returns json
                response['compare_success'] = True
                # if deployment to one org fails, the entire deploy was not successful
                # for result in final_compare_result:
                #     if result['success'] == False:
                #         response['compare_success'] = False
                #         break
                return json.dumps(response)
            else:
                return json.dumps(final_compare_result,indent=4)   

        for destination in destinations:
            if archive_deployments:
                deploy_path = os.path.join(config.project.location,"deploy",destination['username'])
                if not os.path.exists(deploy_path):
                    os.makedirs(deploy_path)
                if not os.path.isfile(os.path.join(config.project.location,"deploy",'.config')):
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file_contents = { 
                        'deployments' : {
                            'named' : [],
                            'timestamped' : []
                        }
                    }
                    config_file.write(json.dumps(config_file_contents))
                    config_file.close()   

                ts = time.time()
                if not config.is_windows:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
                else:
                    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H %M %S')

                if deploy_name:
                    if os.path.isdir(os.path.join(config.project.location,"deploy",destination['username'],deploy_name)):
                        shutil.rmtree(os.path.join(config.project.location,"deploy",destination['username'],deploy_name))
                    os.makedirs(os.path.join(config.project.location,"deploy",destination['username'],deploy_name))
                    util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(config.project.location,"deploy",destination['username'],deploy_name))

                    config_file_json = util.parse_json_from_file(os.path.join(config.project.location,"deploy",'.config'))
                    named_deployment = {
                        'destination' : destination['username'],
                        'name' : deploy_name,
                        'timestamp' : timestamp,
                        'id' : util.get_random_string(30),
                        'package' : os.path.join(config.project.location,"deploy",destination['username'],deploy_name,'unpackaged','package.xml')
                    }
                    config_file_json['deployments']['named'].append(named_deployment)
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file.write(json.dumps(config_file_json))
                    config_file.close()
                else:
                    os.makedirs(os.path.join(config.project.location,"deploy",destination['username'],timestamp))
                    util.extract_base64_encoded_zip(deploy_metadata.zipFile, os.path.join(config.project.location,"deploy",destination['username'],timestamp))

                    config_file_json = util.parse_json_from_file(os.path.join(config.project.location,"deploy",'.config'))
                    timestamped_deployment = {
                        'destination' : destination['username'],
                        'timestamp' : timestamp,
                        'id' : util.get_random_string(30),
                        'package' : os.path.join(config.project.location,"deploy",destination['username'],timestamp,'unpackaged','package.xml')
                    }
                    config_file_json['deployments']['timestamped'].append(timestamped_deployment)
                    config_file = open(os.path.join(config.project.location,"deploy",'.config'), 'wb')
                    config_file.write(json.dumps(config_file_json))
                    config_file.close()

            thread = DeploymentHandler(config.project, destination, self.params, deploy_metadata)
            threads.append(thread)
            thread.start()  
        
        deploy_results = []
        for thread in threads:
            thread.join()  
            deploy_results.append(thread.result)
                
        if self.args.respond_with_html == True:
            html = util.generate_html_response(self.args.operation, deploy_results, self.params)
            response = util.generate_success_response(html, "html") # returns json
            response['deploy_success'] = True
            # if deployment to one org fails, the entire deploy was not successful
            for result in deploy_results:
                if result['success'] == False:
                    response['deploy_success'] = False
                    break
            return json.dumps(response)
        else:
            return json.dumps(deploy_results,indent=4)
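
The compare branch above accumulates its findings into final_compare_result, keyed first by destination username and then by file. A hypothetical example of its shape (note that 'insert' entries are keyed by the short file name while 'update' entries keep the full retrieve path, mirroring the code above; all names and values here are made up):

# Hypothetical shape of final_compare_result; usernames and files are invented.
final_compare_result = {
    "admin@dest-org.example": {
        # file missing from the destination org: keyed by short name
        "NewPage.page": {
            "name": "NewPage.page",
            "type": "ApexPage",
            "action": "insert",
            "message": "Create",
        },
        # file present in both orgs: keyed by the full retrieve path
        "unpackaged/classes/MyClass.cls": {
            "name": "MyClass.cls",
            "type": "ApexClass",
            "action": "update",
            "message": "You will overwrite this file",
        },
    },
}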