Example #1
 def add_records_to_sync(self, folder_path):
     toolJsonData = FileUtils.returnJsonFromFiles(folder_path, "data.json")
     SyncHelperService.validate_sync_data_from_json(toolJsonData)
     sync_id = None
     for rec in toolJsonData:
         if rec is None:
             raise ValueError("No data found for: " + folder_path)
         sync_id = rec.get("sync_id")
         # THIS IS USED IN CLEANER SERVICES
         rec["stored_folder_name"] = folder_path
         idd = self.syncDb.add_sync(rec)
         print "New record was added in Sync with _id: " + str(idd) + \
             ", type: " + rec.get("type") + " and sync_id: " + rec.get("sync_id")
     try:
         # distribution_list can be empty; full_sync_flag is mandatory
         full_sync_flag, distribution_list = \
             SyncHelperService.get_distribution_list_and_status(self.result)
         SyncHelperService.add_notify(sync_id, distribution_list, self.mailer)
     except Exception as e_value:  # catch *all* exceptions
         print 'add_records_to_sync: An email will not be sent due to error: ' + str(e_value)
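For reference, a minimal sketch of the record shape these snippets read from data.json. Only the fields the code above actually touches are shown; the values are hypothetical, and real records carry more data (for example the tool_data documents consumed by the distribution sync in later examples).

sample_records = [
    {
        "sync_id": "20240101_full",  # hypothetical identifier
        "type": "tool",              # lower-cased by the caller
    },
]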
Example #2
def create_lock(rec, status, **kwargs):
    lock_directory = PushHelperService.get_folder_to_push_to(rec)
    createFolder(lock_directory, **kwargs)
    if rec.get("zip_file_name") and str(
            rec.get("zip_file_name")).strip() <> "":
        SyncHelperService.create_lock(
            rec, lock_directory, rec["zip_file_name"] + ".lock",
            str(len(rec["file_list"])) + "," + status.upper(), **kwargs)
    else:
        raise Exception("zip_file_name not found in SyncRequest collection")
Example #3
 def cleanSync(self):
     print 'cleanSync:started'
     sync_ids = []
     for rec in self.syncDb.sync_all():
         if rec.get("status") <> None and str(
                 rec.get("status")).lower() in ["success", "skipped"]:
             if str(rec["sync_id"]) not in sync_ids:
                 sync_ids.append(str(rec["sync_id"]))
     # Clean
     for sync_id in sync_ids:
         SyncHelperService.clean_processed_sync(sync_id)
     print 'cleanSync:ended'
Example #4
 def job_function(self):
     """Start of SyncServices"""
     print 'started running at ' + time.ctime(time.time())
     print 'job_function: Will check if files are pending to process'
     self.load_configuration()
     # CHECK IF NEW DATA IS AVAILABLE TO LOAD
     SyncInputDataHelper().run_service()
     while self.syncDb.get_pending_sync_to_compare() is not None \
             or self.syncDb.get_pending_sync_to_process() is not None:
         try:
             SyncHelperService.compare_tools_for_sync()
             # DU SHOULD ALWAYS SYNC FIRST
             SyncHelperService.compare_dus_for_sync()
             SyncHelperService.compare_duset_for_sync()
             # DU/DUSET SHOULD ALWAYS SYNC FIRST
             SyncHelperService.compare_states_for_sync()
             self.process_additional_data()
             self.process_bulk_upload()
             self.process_sync()
         finally:
             # Report status back even if a step raised.
             self.callbackUrl()
     self.cleanSync()
     print 'ended running at ' + time.ctime(time.time())
Example #5
def check_if_lock_exists(rec, **kwargs):
    lock_directory = PushHelperService.get_folder_to_push_to(rec)
    if rec.get("zip_file_name") and str(
            rec.get("zip_file_name")).strip() <> "":
        return SyncHelperService.read_lock(rec, lock_directory,
                                           rec["zip_file_name"] + ".lock",
                                           **kwargs)
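A minimal usage sketch tying Examples 2 and 5 together. The rec fields shown are the only ones those functions dereference; the values and the "pending" status string are hypothetical.

# Hypothetical sync record: zip_file_name and file_list are the only
# fields create_lock/check_if_lock_exists read.
rec = {"zip_file_name": "20240101_full_tool",
       "file_list": ["a.zip", "b.zip"]}

# Only create a lock when none exists yet for this push target.
if not check_if_lock_exists(rec):
    create_lock(rec, "pending")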
Example #6
 def process_additional_data(self):
     """Start of Processing process_additional_data"""
     # Gets all records for the minimum sync_id, sorted by _id and created_time
     pending_sync_list = self.syncDb.get_pending_sync_to_process()
     if pending_sync_list and pending_sync_list.count() > 0:
         for record in pending_sync_list:
             print " process: Processing _id :" + str(record["_id"])
             try:
                 if not self.syncDb.get_sync_by_id(str(record["_id"])).get(
                         "processed_additional_data", False):
                     directory_to_import_from = SyncHelperService.get_source_dir(
                         record)
                     if directory_to_import_from:
                         # plugin_full_path is assumed to come from the
                         # enclosing module of the original source
                         SyncHelperService.handle_additional_data_while_processing_sync(
                             directory_to_import_from, plugin_full_path)
                         self.syncDb.update_add_processed_by_sync_id(
                             record["sync_id"])
             except Exception as e_value:  # catch *all* exceptions
                 traceback.print_exc()
                 self.syncDb.update_sync_status(
                     str(record["_id"]), "failed",
                     "Processing additional data failed with error" +
                     str(e_value))
Example #7
 def process_sync(self):
     """Start of Processing"""
     sync_id = sync_type = directory_to_import_from = distribution_list = None
     # Gets all records for the minimum sync_id, sorted by _id and created_time
     pending_sync_list = self.syncDb.get_pending_sync_to_process()
     if pending_sync_list:
         print 'No of Pending Sync List to process: ' + str(
             pending_sync_list.count())
     else:
         print 'process: No of Pending Sync List to process: 0. Exiting..'
         return
     for record in pending_sync_list:
         print " process: Processing _id :" + str(record["_id"])
         try:
             sync_id, sync_type = SyncHelperService.get_sync_id_and_type(
                 record)
             directory_to_import_from = SyncHelperService.get_source_dir(
                 record)
             distribution_list = None
             self.handle_extra_validations(record)
             print 'Process: Working on sync_id :' + sync_id + " _id :" + str(
                 record["_id"])
             # distribution_list can be empty; full_sync_flag is mandatory
             full_sync_flag, distribution_list = \
                 SyncHelperService.get_distribution_list_and_status(self.result)
             self.handle_operation(record, full_sync_flag,
                                   directory_to_import_from)
             print 'Process: Done on sync_id :' + sync_id + " _id :" + str(
                 record["_id"])
         except Exception as e_value:  # catch *all* exceptions
             traceback.print_exc()
             self.syncDb.update_sync_status(str(record["_id"]), "failed",
                                            str(e_value))
     # Notify Users
     SyncHelperService.notify(sync_id, distribution_list, self.mailer)
Example #8
    def createZipToExport(self, export_details, filters_to_apply=None):
        """Start Creating Zip File To Export"""
        # CONSTANTS
        file_path = export_details.get("file_path")
        zip_file_name = export_details.get("zip_file_name")
        target_host = export_details.get("target_host", "Multiple")
        sync_type = export_details.get("sync_type", "")
        download_artifacts = export_details.get("download_artifacts", True)
        get_tools = export_details.get("get_tools", True)
        get_dus = export_details.get("get_dus", True)
        get_du_sets = export_details.get("get_du_sets", True)
        get_states = export_details.get("get_states", True)
        download_build_created_after_date = None
        external_artifacts = str(
            export_details.get("external_artifacts")).lower() == "true"
        if filters_to_apply:
            if str(filters_to_apply.get("type")).lower() in ["tool"]:
                get_dus = False
                get_du_sets = False
                get_states = False
            if str(filters_to_apply.get("type")).lower() in ["du"]:
                get_tools = False
            if filters_to_apply.get("time_after"):
                download_build_created_after_date = str(
                    filters_to_apply.get("time_after"))
                filters_to_apply.pop("time_after")
        error_prefix = "[" + sync_type + \
            "][TargetHost:" + target_host + "][sync_id:" + str(zip_file_name) + "]: "
        file_path = file_path + "/" + \
            str(zip_file_name) + '_' + str(sync_type)
        artifact_path = file_path + "/artifacts"
        media_file_path = file_path + "/mediaFiles"
        logo_file_path = file_path + "/logos"
        plugins_path = file_path + "/plugins"
        general_file_details = file_path + '/generalData.json'
        systemDetail = self.systemDetailsDb.get_system_details_single()
        tags_file_details = file_path + '/tagsData.json'
        fa_file_details = file_path + '/faData.json'
        er_file_details = file_path + '/erData.json'
        repository_file_details = file_path + '/reData.json'
        state_status_file_details = file_path + '/ssData.json'
        prerequisites_file_details = file_path + '/preRequisitesData.json'
        data_file = file_path + '/data.json'
        not_exported_list_reason = file_path + '/notExportedListReason.json'
        exported_list = file_path + '/exportedList.json'
        not_exported_list = file_path + '/notExportedList.json'
        tool_data = []
        tool_names = []
        not_exported_tool_names = []
        not_exported_tool_names_and_reason = []
        du_data = []
        du_names = []
        not_exported_du_names = []
        not_exported_du_names_and_reason = []
        du_sets_names = []
        du_sets_data = []
        not_exported_du_sets_names = []
        not_exported_du_sets_names_and_reason = []
        states_data = []
        states_names = []
        not_exported_states_names = []
        not_exported_states_names_and_reason = []
        try:
            general_file = {
                "date": str(datetime.now()),
                "source_host": systemDetail.get("hostname"),
                "source_port": systemDetail.get("port"),
                "source_dpm_version": systemDetail.get("dpm_version"),
                "target": target_host,
                "sync_id": zip_file_name,
                "filters_applied": filters_to_apply,
                "extract_builds_after_date":
                str(download_build_created_after_date)
            }

            SyncHelperService.new_sync(
                file_path, target_host, general_file, artifact_path,
                media_file_path, logo_file_path, general_file_details,
                tags_file_details, prerequisites_file_details, plugins_path,
                fa_file_details, er_file_details, repository_file_details,
                state_status_file_details)

            # COPY PLUGINS
            self.handle_plugins_copy(plugins_path)

            if get_tools:
                tool_data, tool_names, not_exported_tool_names, not_exported_tool_names_and_reason = SyncHelperService.export_tools_for_new_sync(
                    target_host, zip_file_name, logo_file_path, error_prefix,
                    media_file_path, download_artifacts, artifact_path,
                    download_build_created_after_date, filters_to_apply,
                    external_artifacts)
            if get_dus:
                du_data, du_names, not_exported_du_names, not_exported_du_names_and_reason = SyncHelperService.export_dus_for_new_sync(
                    target_host, zip_file_name, logo_file_path, error_prefix,
                    media_file_path, download_artifacts, artifact_path,
                    download_build_created_after_date, filters_to_apply,
                    external_artifacts)
            if get_du_sets:
                du_sets_data, du_sets_names, not_exported_du_sets_names, not_exported_du_sets_names_and_reason = SyncHelperService.export_du_sets_for_new_sync(
                    target_host, zip_file_name, logo_file_path, error_prefix,
                    media_file_path, download_artifacts,
                    download_build_created_after_date, filters_to_apply)
            if get_states:
                states_data, states_names, not_exported_states_names, not_exported_states_names_and_reason = SyncHelperService.export_states_for_new_sync(
                    target_host, zip_file_name, filters_to_apply)

            if filters_to_apply and (
                    filters_to_apply.get("approval_status", "any").lower()
                    != "any" or
                    filters_to_apply.get("package_state_name", "") != ""):
                self.filter_data(states_data, du_data, du_sets_data, du_names,
                                 du_sets_names, not_exported_du_names,
                                 not_exported_du_names_and_reason,
                                 not_exported_du_sets_names,
                                 not_exported_du_sets_names_and_reason)

            data = self.add_general_details(
                tool_data + du_data + du_sets_data + states_data, general_file,
                sync_type)

            if len(data) > 0:
                FileUtils.jsontoFile(data_file, data)
                FileUtils.jsontoFile(
                    exported_list, {
                        "tools": tool_names,
                        "dus": du_names,
                        "duset": du_sets_names,
                        "states": states_names
                    })
                FileUtils.jsontoFile(
                    not_exported_list, {
                        "tools": not_exported_tool_names,
                        "dus": not_exported_du_names,
                        "duset": not_exported_du_sets_names,
                        "states": not_exported_states_names
                    })
                FileUtils.jsontoFile(
                    not_exported_list_reason, {
                        "tools": not_exported_tool_names_and_reason,
                        "dus": not_exported_du_names_and_reason,
                        "duset": not_exported_du_sets_names_and_reason,
                        "states": not_exported_states_names_and_reason
                    })
                FileUtils.createZipFile(file_path, file_path)
                return file_path + ".zip", tool_names + du_names+du_sets_names+states_names, \
                    not_exported_tool_names_and_reason + not_exported_du_names_and_reason + not_exported_du_sets_names_and_reason + not_exported_states_names_and_reason
            else:
                raise Exception("Could not export any entities")
        except Exception as e_value:  # catch *all* exceptions
            print 'SyncServices Error :' + error_prefix + str(e_value)
            traceback.print_exc()
            raise ValueError(str(e_value))
        finally:
            try:
                if os.path.exists(file_path):
                    shutil.rmtree(file_path, ignore_errors=True)
            except Exception as e_value:  # catch *all* exceptions
                pass
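A minimal sketch of the export_details payload that createZipToExport reads. Only keys the method actually accesses are listed; the values and the exporter instance name are hypothetical.

# "exporter" is a hypothetical instance of the service class above.
export_details = {
    "file_path": "/tmp/exports",          # working directory for the zip
    "zip_file_name": "20240101_full",     # also used as the sync_id
    "target_host": "host-a.example.com",  # defaults to "Multiple"
    "sync_type": "full",
    "download_artifacts": True,
    "external_artifacts": "false",        # compared as a string by the code
}
# filters_to_apply may restrict by "type" ("tool"/"du"), "time_after",
# "approval_status" or "package_state_name".
zip_path, exported, not_exported = exporter.createZipToExport(
    export_details, filters_to_apply={"type": "tool"})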
Example #9
    def updatetool(self,
                   tool,
                   full_sync_flag="false",
                   directory_to_import_from=None):
        """Start Tool Update """
        tooldata = tool.get("tool_data")
        localTool = self.toolDB.get_tool_by_name(tooldata["name"])
        ToolHelperService.check_if_tool_data_is_valid(tooldata, localTool)
        tool_id = str(localTool.get("_id"))
        try:
            if tooldata.get("operation").lower() == "update":
                ToolHelperService.add_update_tool(tooldata, tool_id,
                                                  self.logo_path,
                                                  directory_to_import_from,
                                                  self.full_logo_path)
            versions = tooldata.get("versions")
            for record in versions:
                if record.get("operation") not in [
                        "delete", "update", "insert"
                ]:
                    continue
                VersionsData = record
                version_id = None
                localVersion = self.versionsDB.get_version_by_tool_id_name_and_number(
                    tool_id, VersionsData["version_name"],
                    VersionsData["version_number"])
                if localVersion:
                    version_id = str(localVersion["_id"])
                if record.get("operation").lower() == "delete" \
                        and full_sync_flag == "true":
                    # Guard: only delete versions that exist locally
                    if version_id:
                        SyncHelperService.delete_version_and_related_builds(
                            version_id)
                else:
                    # HANDLE VERSION
                    # THE VERSION HAS TO BE UPDATED OR INSERTED

                    # IF IT'S AN EXISTING VERSION WE WILL ALREADY HAVE VERSION_ID
                    if record.get("operation").lower() == "update":
                        ToolHelperService.add_update_version(
                            VersionsData, tool_id, version_id, False)
                    # IF ITS A NEW VERSION
                    if record.get("operation").lower() == "insert":
                        version_id = ToolHelperService.add_update_version(
                            VersionsData, tool_id, None, False)

                    # HANDLE BUILD
                    if VersionsData.get('build') is not None and len(
                            VersionsData.get('build')) > 0:
                        # We need to deactivate all builds not sent in the payload
                        builds_handled = []
                        for build in VersionsData.get('build'):
                            BuildHelperService.add_update_build(
                                build, version_id,
                                os.path.join(
                                    directory_to_import_from, "artifacts",
                                    VersionsData["repository_to_use"]))
                            builds_handled.append(build["build_number"])
                        # SUPPOSE THE ACCOUNT SENDS 2 ACTIVE BUILDS: THEY WILL BE
                        # HANDLED ABOVE, BUT ALL OTHER BUILDS SHOULD BE MADE
                        # INACTIVE LOCALLY
                        for build in self.buildsDB.get_all_builds(version_id):
                            if build["build_number"] not in builds_handled:
                                build_id = build.get("_id")
                                build["_id"] = {}
                                build["_id"]["oid"] = str(build_id)
                                build["status"] = "0"
                                self.buildsDB.update_build(build)

                    # HANDLE DOCUMENT
                    if VersionsData.get('document') is not None \
                            and len(VersionsData.get('document')) > 0:
                        HelperServices.add_update_documents(
                            VersionsData['document']['documents'], version_id)

                    # HANDLE DEPLOYMENT FIELDS
                    if VersionsData.get('deployment_field') is not None \
                            and len(VersionsData.get('deployment_field')) > 0:
                        HelperServices.add_update_deployment_fields(
                            VersionsData['deployment_field']['fields'],
                            version_id)

                    # HANDLE MEDIA FILES
                    if VersionsData.get('media_file') is not None \
                            and len(VersionsData.get('media_file')) > 0:
                        HelperServices.add_update_media_files(
                            VersionsData['media_file']['media_files'],
                            version_id, directory_to_import_from,
                            self.full_media_files_path, self.media_files_path)
            return {
                "result": "success",
                "message": tooldata["name"] + " was updated"
            }
        except Exception as e_value:  # catch *all* exceptions
            traceback.print_exc()
            return {"result": "failed", "message": str(e_value)}
Example #10
 def compare(self):
     """Comparing sync data"""
     try:
         print "compare: comparing sync data"
         tools = self.distributionSync.GetNewDistribution()
         if tools and tools.count() > 0:
             for tool in tools:
                 tool["changed_object"] = []
                 try:
                     modified = 0
                     tool_id = {}
                     tool_id["oid"] = str(tool["_id"])
                     tool["_id"] = tool_id
                     distributedTool = tool.get("tool_data")
                     # VALIDATE THE TOOL DETAILS
                     ToolHelperService.validate_tool_data(distributedTool)
                     tool_name = distributedTool["name"]
                     ltooldata = self.toolDB.get_tool_by_name(tool_name)
                     if ltooldata is None:
                         print "compare: tool " + tool_name + " was not found in local DB."
                         tool["operation"] = "insert"
                         tool["status"] = "compared"
                         tool["updated_time"] = datetime.now()
                         distributedTool["operation"] = "insert"
                         modified = 1
                         tool["tool_data"] = distributedTool
                         # updated=1
                         updated = self.distributionSync.UpdateDistribution(
                             tool)
                         if updated:
                             print "compare:This tool will be created while processing"
                         else:
                             print "compare: Unable to trigger tool creation while processing"
                         continue
                     else:
                         print "compare: tool " + tool_name + " was found in local DB."
                         sub = SyncHelperService.compare_tool_data(
                             copy.deepcopy(distributedTool),
                             copy.deepcopy(ltooldata))
                         if sub:
                             print "compare:tool_data of tool " + tool_name + " is required to be updated as the tool data has changed."
                             distributedTool["operation"] = "update"
                             tool["operation"] = "update"
                             toolchange = {}
                             toolchange["tool"] = str(sub)
                             tool["changed_object"].append(toolchange)
                             modified = 1
                         else:
                             print "compare: tool_data of tool " + tool_name + " is not required to be updated.As tool data has not changed"
                             distributedTool["operation"] = ""
                             modified = 0
                         syncversions = []
                         for version in distributedTool["versions"]:
                             localversion, isActive = list(
                                 self.versionsDB.
                                 get_version_detail_and_status(
                                     str(ltooldata["_id"]),
                                     version["version_number"],
                                     version["version_name"], True))
                             if localversion:
                                 ToolHelperService.get_dependend_tools(
                                     localversion, True)
                                 if isActive:
                                     sub = SyncHelperService.compare_version_data(
                                         copy.deepcopy(version),
                                         copy.deepcopy(localversion))
                                     if sub:
                                         print "compare:versions of  tool " + tool_name + " is required to be updated as the version has changed."
                                         version["operation"] = "update"
                                         modified = 1
                                         versionchange = {}
                                         versionchange["version"] = str(sub)
                                         tool["changed_object"].append(
                                             versionchange)
                                         # version["version_id"]= str(localversion["_id"])
                                     else:
                                         print "compare:versions of tool " + tool_name + " is not required to be updated.As versions has not changed"
                                         version["operation"] = ""
                                 else:
                                     version["operation"] = "update"
                                     modified = 1
                                     versionchange = {}
                                     versionchange["version"] = str(
                                         "Version is not active")
                                     tool["changed_object"].append(
                                         versionchange)
                             else:
                                 version["operation"] = "insert"
                                 modified = 1
                                 versionchange = {}
                                 versionchange["version"] = str(
                                     "Version was not found")
                                 tool["changed_object"].append(
                                     versionchange)
                             syncversions.append(version)
                         tool["status"] = "compared"
                         tool["updated_time"] = datetime.now()
                         distributedTool["versions"] = syncversions
                     tool["tool_data"] = distributedTool
                     if modified == 1 and tool["operation"] == "":
                         tool["operation"] = "update"
                     elif modified == 0:
                         tool["operation"] = ""
                         tool["status"] = "success"
                         tool["status_message"] = "No difference was found while comparing"
                     updated = self.distributionSync.UpdateDistribution(
                         tool)
                 except Exception as e_value:  # catch *all* exceptions
                     print 'DistributionSyncServices-Compare :' + str(
                         e_value)
                     traceback.print_exc()
                     self.distributionSync.UpdateDistributionStatus(
                         str(tool["_id"]["oid"]), "failed",
                         "Comparing failed with error :" + str(e_value))
             # SEND EMAIL ONLY IF WE HAVE NEW TOOLS
             try:
                 self.notifyCompared(self.result.get("distribution_list"))
             except Exception as e_value:  # catch *all* exceptions
                 print 'compare: An email will not be sent due to error: ' + str(
                     e_value)
     except Exception as e_value:  # catch *all* exceptions
         traceback.print_exc()
         print 'DistributionSyncServices-Compare :' + str(e_value)
Example #11
 def checkPendingImports(self):
     """Checking Pending Import """
     try:
         onlyfiles = [
             f for f in listdir(self.current_import_path)
             if isfile(join(self.current_import_path, f))
             if "DPM_tools_manifest" in str(f)
             and not str(f).endswith("_done.zip")
         ]
         if not onlyfiles:
             print "No pending zip files to process"
             return
         for selected_file in onlyfiles:
             try:
                 file_path = join(self.current_import_path, selected_file)
                 file_name = os.path.basename(file_path)
                 file_name_without_ext = os.path.splitext(file_name)[0]
                 print " Processing file :" + file_path
                 if os.path.isfile(
                         join(self.current_import_path,
                              file_name_without_ext) + '_done.zip'):
                     print join(self.current_import_path, file_name_without_ext) \
                         + '_done.zip' + ' was found. Deleting it'
                     os.remove(
                         join(self.current_import_path,
                              file_name_without_ext) + '_done.zip')
                 if os.path.exists(
                         join(self.current_import_path,
                              file_name_without_ext)):
                     print join(self.current_import_path, file_name_without_ext) \
                         + ' was found. Deleting it'
                     shutil.rmtree(
                         join(self.current_import_path,
                              file_name_without_ext))
                 print 'checkPendingImports: Processing ' + file_path
                 folder_path = \
                     os.path.normpath(FileUtils.unzipImportFile(file_path))
                 toolJsonData = FileUtils.returnJsonFromFiles(
                     folder_path, 'data.json')
                 SyncHelperService.validate_sync_data_from_json(
                     toolJsonData, False)
                 generalJsonData = FileUtils.returnJsonFromFiles(
                     folder_path, "generalData.json")
                 if generalJsonData is None:
                     raise ValueError(
                         "generalData.json was not found inside the zip file"
                     )
                 self.distributionSync.CancelAllDistributions()
                 for rec in toolJsonData:
                     rec = self.updatePaths(
                         join(self.current_import_small_path,
                              os.path.basename(folder_path)), rec)
                     # THIS IS USED IN CLEANER SERVICES
                     rec["stored_folder_name"] = folder_path
                     idd = self.distributionSync.AddDistribution(rec)
                     print " New tool " + rec.get("tool_data").get(
                         "name") + " was added in Sync with _id :" + str(
                             idd)
                 try:
                     self.add_notify(self.result.get("distribution_list"))
                 except Exception as e_value:  # catch *all* exceptions
                     print 'Email will not be sent due to error :' \
                         + str(e_value)
                 FileUtils.renameFile(
                     file_path,
                     join(self.current_import_path,
                          os.path.splitext(file_name)[0] + "_done.zip"))
                 print 'checkPendingImports: Done processing ' \
                     + join(self.current_import_path,
                            os.path.splitext(file_name)[0] + '_done.zip')
             except Exception as e_value:  # catch *all* exceptions
                 print 'checkPendingImports: File :' \
                     + str(file_path) + \
                     ' was skipped due to error ' + str(e_value)
                 FileUtils.renameFile(
                     file_path,
                     join(
                         self.current_import_path,
                         os.path.splitext(file_name)[0] + "_failed_as_" +
                         str(e_value).replace(" ", "_") + "_done.zip"))
     except Exception as e_value:  # catch *all* exceptions
         traceback.print_exc()
         print 'checkPendingImports: Error while unzipping pending files :' + str(
             e_value)
Example #12
def upload_manual_sync_file():
    file_path = None
    folder_path = None
    inserted_ids = []
    try:
        sync_id = None
        # Get the uploaded file
        file = request.files['file']
        if file is None:
            raise ValueError("No file selected")
        # Check that the file is one of the allowed types/extensions
        is_zip = ('.' in file.filename and
                  file.filename.rsplit('.', 1)[1] in ['zip'])
        if not is_zip:
            raise Exception("Invalid file. Please select a file of type 'zip'")
        if file and is_zip:
            # Make the filename safe, remove unsupported chars
            filename = secure_filename(file.filename)
            file_name_without_ext = filename.split(".")[0]
            import_path = str(import_full_path)
            temp_folder_path = str(import_full_path + '/' + file_name_without_ext)
            if os.path.isfile(temp_folder_path + "_done.zip") or os.path.exists(temp_folder_path):
                raise Exception("This file was already requested")
            folder_path = temp_folder_path
            file_path = str(import_full_path + '/' + filename)
            if os.path.isfile(file_path):
                os.remove(file_path)
            file.save(file_path)
            folder_path = os.path.normpath(
                FileUtils.unzipImportFile(file_path))
            toolJsonData = FileUtils.returnJsonFromFiles(
                folder_path, "data.json")
            SyncHelperService.validate_sync_data_from_json(toolJsonData)
            for rec in toolJsonData:
                # THIS IS USED IN CLEANER SERVICES
                rec["stored_folder_name"] = folder_path
                if request.form.get('callback_url'):
                    rec["callback_url"] = request.form.get('callback_url')
                inserted_ids.append(syncDb.add_sync(rec))
                if not sync_id:
                    sync_id = rec.get("sync_id")
            FileUtils.renameFile(file_path, join(
                import_path, os.path.splitext(filename)[0] + "_done.zip"))
            if request.form.get("skip_process_ind", "false").lower() == "true":
                return jsonify(json.loads(dumps({
                    "result": "success",
                    "message": "File uploaded successfully.",
                    "data": sync_id}))), 200
            else:
                try:
                    syncService.job_function()
                except Exception as e:  # catch *all* exceptions
                    print str(e)
                sync_data = syncService.analyse_sync_details(sync_id, False)
                return jsonify(json.loads(dumps({
                    "result": "success",
                    "message": "File was uploaded successfully. " +
                               sync_data.get("added") +
                               " entities were processed. Success: " +
                               sync_data.get("success_count") +
                               " Failed: " + sync_data.get("failed_count"),
                    "data": sync_data.get("data")}))), 200
    except Exception as e:  # catch *all* exceptions
        if file_path is not None:
            if os.path.isfile(file_path):
                os.remove(file_path)
        if folder_path is not None:
            if os.path.exists(folder_path):
                shutil.rmtree(folder_path)
                if os.path.isfile(folder_path + "_done.zip"):
                    os.remove(folder_path + "_done.zip")
        for ids in inserted_ids:
            syncDb.remove_sync(str(ids))
        raise
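A hedged client-side sketch of calling the upload handler above. The route and host are hypothetical; the file, callback_url and skip_process_ind fields are the ones the handler reads.

import requests  # assumes the requests library is available

# The URL is hypothetical; only the form fields come from the handler.
with open("20240101_full_tool.zip", "rb") as fh:
    resp = requests.post(
        "http://dpm-host:5000/sync/upload",            # hypothetical route
        files={"file": fh},
        data={"callback_url": "http://caller/notify",  # optional
              "skip_process_ind": "true"})             # skip processing
print resp.json()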
Example #13
 def put(self):
     data = request.json
     return {"result": "success", "message": str(SyncHelperService.retry_sync(data.get("_id"),data.get("sync_id")))+" request updated successfully" }, 200