def assign_texts(lng_key: str):
    """ Assignment of the texts to the messages/commands according to language :param lng_key: language key :return: None (setup global assignment of texts) """
    text_assignments = {}
    LOGGER.debug(f"Assign {languages[lng_key].get(NAME)} texts "
                 f"to messages and commands.")
    # Messages
    # NOTE(review): range stops at len - 1, skipping the last message
    # index — confirm this is intentional (index 11 is re-mapped below).
    for i in range(0, len(texts[MSG]) - 1):
        text_assignments[texts[MSG].get(i)] = texts[lng_key].get(i)
    # Commands
    # Command texts follow the message texts in the per-language table,
    # hence the offset of len(texts[MSG]) - 1.
    for i in range(0, len(texts[CMD]) - 1):
        text_assignments[texts[CMD].get(i)] = \
            texts[lng_key].get(i + len(texts[MSG]) - 1)
    # Message 'Update'
    text_assignments[texts[MSG].get(11)] = texts[lng_key].get(33)
    # Command 'Standby'
    text_assignments[texts[CMD].get(22)] = texts[lng_key].get(34)
    languages[lng_key].update(text_assignments)
    # Publish the full language table through the module-level name.
    global assignment
    assignment = languages
    LOGGER.debug(f"{languages[lng_key].get(NAME)} text library built.")
def update_issue_status(status_list):
    """Persist new Jira issue statuses to the local database.

    :param status_list: iterable of dicts with 'key' and 'status' entries
    """
    for entry in status_list:
        query = DbJiraIssues.update(
            dc_status=entry['status'].upper()
        ).where(DbJiraIssues.key == entry['key'])
        query.execute()
        LOGGER.debug(query)
def app_list(self):
    """Return the (short) list of known apps: guid and name only."""
    LOGGER.debug("Retrieve (short) app list")
    return self._cf_db.select(CFApps, ['guid', 'name'])
def _generate_cmd_queue(self, cdb, args):
    """Build the list of shell commands that run the analyzer binary on
    every scannable compilation unit of the compilation database.

    :param cdb: parsed compile_commands.json (iterable of dicts with
        'directory', 'arguments' and 'file' keys)
    :param args: CLI namespace; args.file restricts which files are scanned
    :return: list of shell command strings
    """
    command_queue = []
    # Loop-invariant values: compute once instead of per compilation unit.
    default_args = self.config.getlist("default_args")
    base_extra = f"--quiet {' '.join(default_args)}"
    absolute_binary_path = Path(self.binary).resolve()
    for compilation_unit in cdb:
        directory = Path(compilation_unit["directory"]).absolute()
        full_command = compilation_unit["arguments"]
        absolute_filename = directory / compilation_unit["file"]
        compiler = Path(full_command[0]).name.lower()
        arguments = list(self.quote_defines(full_command[1:]))
        arguments = self.add_additions(arguments)
        arguments = self.filter_arguments(arguments)
        arguments = self.convert_includes(arguments)
        arguments.extend(self.includes_as_cli_flags(self.default_includes()))
        extra = base_extra
        if compiler.endswith("cl.exe"):
            # MSVC-style driver: translate the arguments and switch the
            # clang driver into cl mode.
            arguments = list(map(self.convert_arguments, arguments))
            extra = f"{extra} --extra-arg-before=--driver-mode=cl"
        if absolute_filename.is_file():
            if self.should_scan(absolute_filename, args.file):
                tmp_cmd = (f"cd {directory} && {absolute_binary_path} {extra} "
                           f"{absolute_filename} -- {' '.join(arguments)}")
                command_queue.append(tmp_cmd)
        else:
            log.debug(f"File {absolute_filename} is not scanned")
    return command_queue
def run_theater(strip):
    """Run color-wipe and theater-chase animations until the stop flag is set.

    :param strip: LED strip instance to animate
    """
    LOGGER.debug("running...")
    from control import get_stop_flag
    # The animation sequence, data-driven instead of eight nested ifs.
    # The stop flag is re-checked before every step so the loop aborts fast.
    steps = (
        (color_wipe_full, Color(127, 0, 0)),      # Red wipe
        (color_wipe_full, Color(0, 127, 0)),      # Green wipe
        (color_wipe_full, Color(0, 0, 127)),      # Blue wipe
        (color_wipe_full, Color(127, 127, 127)),  # White wipe
        (theater_chase, Color(127, 127, 127)),    # White theater chase
        (theater_chase, Color(0, 0, 127)),        # Blue theater chase
        (theater_chase, Color(0, 127, 0)),        # Green theater chase
        (theater_chase, Color(127, 0, 0)),        # Red theater chase
    )
    while not get_stop_flag():
        try:
            set_brightness_depending_on_daytime(strip)
            for animate, color in steps:
                if get_stop_flag():
                    break
                animate(strip, color)
        except KeyboardInterrupt:
            # BUG FIX: Logger.warn() is deprecated; use warning().
            LOGGER.warning("KeyboardInterrupt")
            exit()
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            exit()
    clear(strip)
def __config_yum(self):
    """Replace all yum repository definitions on the remote host with the
    configured OpenStack repositories.

    Wipes /etc/yum.repos.d/, appends one [name]/baseurl section per entry
    of self.yum to openstack.repo (gpgcheck disabled, enabled=1) and
    finally cleans the yum cache. Everything runs remotely via SSH.
    """
    LOGGER.debug(
        "=================================== config yum(%s) ==================================="
        % self.host["hostname"])
    # First round-trip: remove every existing repo definition.
    commands = ["rm -rf /etc/yum.repos.d/*"]
    utils.ssh_execute(commands, self.host)
    commands = list()
    for yum in self.yum:
        # One repo section per configured mirror, appended line by line.
        commands.append("echo '[%s]' >> /etc/yum.repos.d/openstack.repo" %
                        yum["name"])
        commands.append(
            "echo 'name=%s' >> /etc/yum.repos.d/openstack.repo" % yum["name"])
        commands.append(
            "echo 'baseurl=%s' >> /etc/yum.repos.d/openstack.repo" %
            yum["url"])
        commands.append(
            "echo 'gpgcheck=0' >> /etc/yum.repos.d/openstack.repo")
        commands.append(
            "echo 'enabled=1' >> /etc/yum.repos.d/openstack.repo")
        # Blank separator line between repo sections.
        commands.append("echo '\n' >> /etc/yum.repos.d/openstack.repo")
    commands.append("yum clean all")
    utils.ssh_execute(commands, self.host)
def start_vpn(self):
    """Start the VPN client: flag it running and spawn the worker threads."""
    LOGGER.debug("Client start_vpn")
    self.running = True
    self.recv_thread = threading.Thread(target=self.handle_recv)
    self.traffic_thread = threading.Thread(target=self.handle_traffic)
    self.recv_thread.start()
    self.traffic_thread.start()
def ensure(self, entity_cls, **attrs):
    """Ensures that a Satellite entity of the given class exists and has its attributes set to the given values. :param type entity_cls: The (nailgun) class of the Satellte entity to be managed Other named parameters are taken as attributes for the manage entity. :returns: A pointer to the entity that was created that can be assigned to other entities` link attributes (Please do no assume that is an entity object - this is subject to change) """
    # Build an in-memory template entity carrying the requested attributes.
    template = self.entity_from_attrs(entity_cls, attrs)
    LOGGER.debug('template: %s', pformat(template.get_values()))
    # Look for an entity that already matches the key attributes.
    existing = self.find_by_key(entity_cls, **attrs)
    if existing:
        existing_data = existing[0].read()
        LOGGER.debug('existing: %s', pformat(existing_data.get_values()))
        # Reuse the existing id so an update targets the right record.
        template.id = existing_data.id
        if self.similar_entities(existing_data, template):
            # Nothing to change — return the server-side entity as-is.
            LOGGER.info('Unchanged entitiy: %s',
                        self.format_entity(existing_data))
            return existing_data
        else:
            self.log_entity_diff(existing_data, template)
    # Either no match was found (create) or it differs (update).
    return self.update_or_create(template)
def add_misp_tag_to_event(self, event_id, tag_id):
    """ Add MISP tag to MISP event :param event_id: :param tag_id: :return: parsed JSON response on success, None on HTTP error """
    LOGGER.debug('Adding DDoSCH tag to the event')
    if not self.verify_tls:
        urllib3.disable_warnings()
    response = requests.post(
        f'{self.protocol}://{self.host}/events/addTag/{event_id}/{tag_id}',
        headers={
            'Authorization': self.token,
            'Accept': 'application/json'
        },
        timeout=10,
        verify=self.verify_tls)
    LOGGER.debug(f'status: {response.status_code}')
    try:
        response.raise_for_status()
        return response.json()
    except requests.HTTPError:
        # BUG FIX: the message wrongly said "Creating MISP Tag" — this
        # endpoint attaches a tag to an event.
        LOGGER.critical(
            f'Adding MISP tag to event responded with status code:{response.status_code}'
        )
        return None
def add_misp_tag(self, tag_name, tag_color) -> Optional[dict]:
    """
    Create a new tag in MISP

    :param tag_name: Name of the new tag
    :param tag_color: Color of the new tag
    :return: Server response if succesful, else None
    """
    LOGGER.debug(f'Creating a {tag_name} tag in MISP')
    if not self.verify_tls:
        urllib3.disable_warnings()
    endpoint = f'{self.protocol}://{self.host}/tags/add'
    payload = {'name': tag_name, 'colour': tag_color}
    request_headers = {
        'Authorization': self.token,
        'Accept': 'application/json',
    }
    response = requests.post(endpoint,
                             json=payload,
                             headers=request_headers,
                             timeout=10,
                             verify=self.verify_tls)
    try:
        response.raise_for_status()
    except requests.HTTPError:
        LOGGER.critical(
            f'Creating MISP Tag responded with status code:{response.status_code}'
        )
        return None
    return response.json()
def run_rainbow(strip):
    """Animate a moving rainbow across the whole strip until stopped.

    :param strip: LED strip instance to animate
    """
    LOGGER.debug("running...")
    from control import get_stop_flag
    while not get_stop_flag():
        try:
            set_brightness_depending_on_daytime(strip)
            # Five full 256-step color-wheel cycles per brightness update.
            for j in range(256 * 5):
                if get_stop_flag():
                    break
                for i in range(strip.numPixels()):
                    if get_stop_flag():
                        break
                    strip.setPixelColor(
                        i,
                        wheel((int(i * 256 / strip.numPixels()) + j) & 255))
                if not get_stop_flag():
                    strip.show()
                    time.sleep(.02)
        except KeyboardInterrupt:
            # BUG FIX: Logger.warn() is deprecated; use warning().
            LOGGER.warning("KeyboardInterrupt")
            exit()
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            exit()
    clear(strip)
def search_misp_events(self, misp_filter: dict = None) -> Optional[dict]:
    """
    Search for MISP events

    :param misp_filter: fields by which to filter retrieved MISP events
    :return: MISP events if found, else None
    """
    LOGGER.debug(f'Searching MISP events with filter: {misp_filter}')
    if not self.verify_tls:
        urllib3.disable_warnings()
    endpoint = f'{self.protocol}://{self.host}/events/index'
    request_headers = {
        'Authorization': self.token,
        'Accept': 'application/json',
    }
    response = requests.post(endpoint,
                             json=misp_filter or dict(),
                             headers=request_headers,
                             timeout=10,
                             verify=self.verify_tls)
    try:
        response.raise_for_status()
    except requests.HTTPError:
        LOGGER.critical(
            f'Retrieving MISP events responded with status code:{response.status_code}'
        )
        return None
    return response.json()
def get_outliers(data: pd.DataFrame,
                 column: Union[str, list[str]],
                 fraction_for_outlier: float,
                 use_zscore: bool = True,
                 return_fractions: bool = False,
                 return_others: bool = False) -> list:
    """ Find the outlier(s) in a pandas DataFrame :param data: DataFrame in which to find outlier(s) :param column: column or combination of columns in the dataframe for which to find outlier value(s) :param fraction_for_outlier: if a value comprises this fraction or more of the data, it is considered an outlier :param use_zscore: Also take into account the z-score to determine outliers (> 2 * std from the mean) :param return_fractions: Return the fractions of traffic occupied by each outlier. :param return_others: in the outliers, return the fraction of "others" - i.e., the non-outlier values combined :return: list of outlier values (or (value, fraction) tuples) """
    packets_per_value = data.groupby(column).nr_packets.sum().sort_values(
        ascending=False)
    fractions = packets_per_value / packets_per_value.sum()
    zscores = (fractions - fractions.mean()) / fractions.std()
    LOGGER.debug(f"top 5 '{column}':\n{fractions.head()}")
    outliers = [
        (key, round(fraction, 3)) if return_fractions or return_others else key
        for key, fraction in fractions.items()
        if fraction > fraction_for_outlier or (zscores[key] > 2 and use_zscore)
    ]
    if len(outliers) > 0:
        LOGGER.debug(f"Outlier(s) in column '{column}': {outliers}\n")
    if return_others and (explained := sum(
            [fraction for _, fraction in outliers])) < 0.99:
        outliers.append(('others', round(1 - explained, 3)))
    # BUG FIX: the original fell off the end and always returned None.
    return outliers
def select(self, table, fields=None, where=None, as_dict=True):
    """
    Wrap up a simple generic select.

    :param table: table object to query
    :param fields: list of fields to query for ("select X")
    :param where: match conditions ("where ...")
    :param as_dict: return dict if true else return cursor
    :return: dict or cursor result from query
    """
    # Normalize single-string arguments into lists.
    if fields and isinstance(fields, str):
        fields = [fields]
    if where and isinstance(where, str):
        where = [where]
    columns = fields if (fields and fields != '*') else table.columns
    itemspec = ','.join(fields) if fields else '*'
    LOGGER.debug("%s table query for items: <%s>", table.name, itemspec)
    sql = "SELECT {} FROM {}".format(itemspec, table.name)
    if where:
        LOGGER.debug("%s table query for match: <%s>", table.name, where)
        sql += " WHERE {}".format(' AND '.join(where))
    return self.query_dict(sql, columns) if as_dict else self.query(sql)
def rename(self, path, path1):
    """Rename *path* to *path1* by copying buffers/files and deleting the
    source.

    :return: -errno.EIO if the backing write fails; None otherwise.
        Several combinations are not implemented yet (see TODOs).
    """
    # Rename is handled by copying and deleting files...
    LOGGER.debug("rename %s %s" % (path, path1))
    d = self.get_dir(path)
    if self.is_valid_file(path) and d.is_file(path):
        if not self.is_valid_file(path1):
            # from a valid file to an editor file
            buf = self.get_file_buf(path1)
            buf.write(d.read_file(path))
            # TODO : remove path ?
        else:
            # from a valid file to a valid file
            # if rename is defined
            # TODO : with unlink method defined in fs
            pass
    elif not self.is_valid_file(path):
        if self.is_valid_file(path1) and d.is_file(path1):
            # from an editor file to a valid file
            buf = self.get_file_buf(path)
            ret = d.write_to(path1, buf.getvalue())
            self.open_mode = None
            self.remove_file_buf(path)
            if ret == False:
                return -errno.EIO
        else:
            # from an editor file to an editor file
            # BUG FIX: this case lived in a duplicate `elif not
            # self.is_valid_file(path)` branch that could never run.
            # TODO
            pass
def checkTrame(self):
    """Validate the pending frame and, if it belongs to a known sensor,
    translate it and persist the new sensor state.

    NOTE(review): a bad separator or checksum is only logged — the frame
    is still processed afterwards; confirm whether it should be dropped.
    """
    if self.trameUsed:
        LOGGER.debug("Trame received : {}".format(self.trameUsed.lessRawView()))
        if ("A55A" not in self.trameUsed.sep):
            LOGGER.warn("Wrong separator, rejected")
        if (self.doChecksum(self.trameUsed) not in self.trameUsed.checkSum):
            # Bad checksum (logged only; processing continues below)
            LOGGER.warn("Wrong checksum, expected : {}, rejected".format(self.doChecksum(self.trameUsed)))
        with self.lock:
            if (self.trameUsed.ident in self.identSet):
                # Fetch the matching sensor from the database
                sensorUsed = sensor.Sensor.objects(physic_id=self.trameUsed.ident)[0]
                # The new value to store; its type depends on the sensor kind
                newData = ''
                if (sensorUsed.__class__.__name__=="Switch"):
                    newData=sensorUsed.translateTrame(self.trameUsed)
                elif (sensorUsed.__class__.__name__=="Temperature"):
                    newData = sensorUsed.translateTrame(self.trameUsed)
                elif (sensorUsed.__class__.__name__=="Position"):
                    newData = sensorUsed.translateTrame(self.trameUsed)
                else :
                    LOGGER.warn("Other Captor (not handle (YET !) )")
                # Push the translated value to the database
                if newData :
                    sensorUsed.update(newData)
                    LOGGER.info(" Sensor {} ||New data {}".format(sensorUsed.physic_id, sensorUsed.current_state))
        # Clear the consumed frame
        self.trameUsed=''
def _request(self, url, json=True):
    """ Send get request to the given url and handle errors. Return json if indicated else raw data. :param url: the target url to send the get request to :param json: true if json result required or raw data if false :return: request response (json or raw), or None on error """
    LOGGER.debug("Fetcher GET request: %s", url)
    retn = None
    try:
        rsp = requests.get(url, timeout=self._bb_request_time_limit)
    except HTTPError as err:
        LOGGER.error("HTTP request error (url %s): %s", url, err)
    except Exception as exn:
        LOGGER.error("Unknown error requesting from %s: %s", url, exn)
    else:
        if rsp.status_code == requests.codes.ok:
            # BUG FIX: requests.Response has no `.data` attribute — the
            # raw body is `.content`.
            retn = rsp.json() if json else rsp.content
        else:
            LOGGER.info("Error requesting from BB fetcher: %s", url)
            LOGGER.debug("Query error %d (%s): %s", rsp.status_code,
                         rsp.reason, rsp.text)
    return retn
def run_clock3(stripe):
    """Render an analog-style LED clock (dial, seconds sweep, minute and
    hour markers) until the stop flag is set.

    :param stripe: LED strip instance to animate
    """
    LOGGER.debug("running...")
    from control import get_stop_flag
    while not get_stop_flag():
        try:
            now = set_brightness_depending_on_daytime(stripe)[0]
            led_for_hour = int(int(now.hour) % 12 * 2)
            led_for_minute = int(now.minute // 2.5)
            leds_per_2500ms = int(round(now.second / 2.5))
            _dial(stripe)
            _seconds(leds_per_2500ms, stripe)
            _minute(led_for_minute, led_for_hour, stripe)
            _hour(led_for_hour, stripe)
            stripe.show()
            time.sleep(0.2)
            if leds_per_2500ms == stripe.numPixels():
                # End of the seconds sweep: pause, then wipe for the next pass.
                time.sleep(1.3)
                clear(stripe)
        except KeyboardInterrupt:
            # BUG FIX: Logger.warn() is deprecated; use warning().
            LOGGER.warning("KeyboardInterrupt.")
            exit()
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            exit()
    clear(stripe)
def _get_fetcher_status(self):
    """ Retrieve the Bitbucket fetcher (cache) status. """
    url = f"{self._org_url}/reader_status"
    LOGGER.debug("Requesting BB fetcher reader status (%s)", url)
    return self._request(url)
def close(self):
    """Stop the TAP control loop and wait for both worker threads to end."""
    LOGGER.debug("TAPControl close")
    self.goOn = False
    # Poll instead of join() to preserve the original shutdown behavior.
    for worker in (self.read_thread, self.write_thread):
        while worker.is_alive():
            time.sleep(0.1)
def run(self):
    """Start the TAP control read and write worker threads."""
    LOGGER.debug("TAPControl run")
    self.goOn = True
    self.read_thread = threading.Thread(target=self.handle_read)
    self.read_thread.start()
    self.write_thread = threading.Thread(target=self.handle_write)
    self.write_thread.start()
def _get_space(self, *args):
    """ Get the data for all spaces or the one(s) specified """
    LOGGER.debug("REST requested space data")
    _, filters = args
    lowered = self._keys_to_lower(filters)
    return jsonify(self._cfagent.get_space(lowered))
def determine_filetype(filenames: list[Path]) -> FileType:
    """ Determine whether the input files are Flows or PCAPs; if it's neither or a mix, quit. :param filenames: input capture files :return: PCAP or FLOW """
    filetype = None
    for filename in filenames:
        if not filename.exists() or not filename.is_file() or not os.access(
                filename, os.R_OK):
            # BUG FIX: include the offending filename in the message
            # (the original printed a literal "(unknown)").
            error(
                f'{filename} does not exist or is not readable. If using docker, did you mount the location '
                f'as a volume?')
        if filename.suffix.lower() == '.pcap' and filetype in [
                FileType.PCAP, None
        ]:
            filetype = FileType.PCAP
        elif filename.suffix.lower() == '.nfdump' and filetype in [
                FileType.FLOW, None
        ]:
            filetype = FileType.FLOW
        else:
            if filetype is None:
                # Typo fix: "extesion" -> "extension".
                error(f"File extension '{filename.suffix}' not recognized. "
                      'Please use .pcap for PCAPS and .nfdump for Flows.')
            else:
                error(
                    'Please use only one type of capture file to create a fingerprint (.pcap or .nfdump)'
                )
    LOGGER.debug(f'Input file type: {filetype}')
    return filetype if filetype is not None else error(
        'No valid input files given.')
def get_all_dashboards():
    """Query the search API and return all dashboards as parsed JSON."""
    response = requests.get(search_url, headers=headers)
    if response.status_code != 200:
        LOGGER.error("Error search api, code: %s" % response.status_code)
        LOGGER.debug("return text: %s" % response.text)
    return json.loads(response.text)
def updateIdentSet(self):
    """ Safely update the identifier set of the traductor """
    for anUpdate in lazzyUpdate.objects:
        # Logger.warn() is deprecated; use warning().
        LOGGER.warning("id : {} || state : {}".format(anUpdate.idToUpdate, anUpdate.newState))
        if anUpdate.idToUpdate == "":
            # Empty id: rebuild the whole identifier set from the database.
            with self.lock:
                self.identSet = set([])
                for lsensor in sensor.Sensor.objects:
                    self.identSet.add(lsensor.physic_id)
                    LOGGER.info(lsensor.physic_id)
                LOGGER.info("Traductor's set of captors updated")
        elif anUpdate.newState == "":
            # New sensor registration: add its id if it exists in the db.
            with self.lock:
                # BUG FIX: the original tested a bare generator expression,
                # which is always truthy; any() performs the real check.
                if any(anUpdate.idToUpdate == things.physic_id
                       for things in sensor.Sensor.objects):
                    self.identSet.add(anUpdate.idToUpdate)
                    LOGGER.info("{} added".format(anUpdate.idToUpdate))
        else:
            # State change: forward a frame to the captor.
            LOGGER.error("Sensor to update : {} ||new state : {}".format(anUpdate.idToUpdate, anUpdate.newState))
            self.sendTrame(anUpdate.idToUpdate, anUpdate.newState)
        anUpdate.delete()
        LOGGER.warning(" {} update GROS delete de : {} || {}".format(lazzyUpdate.objects.count(), anUpdate.idToUpdate, anUpdate.newState))
        # Only the first pending update is processed per call.
        return
    LOGGER.debug("nothing to update")
def run_clock6(strip):
    """Render hour/minute as faded color arcs until the stop flag is set.

    :param strip: LED strip instance to animate
    """
    LOGGER.debug("running...")
    from control import get_stop_flag
    while not get_stop_flag():
        try:
            hour_value, minute_value = _get_pointer(strip)[:2]
            # arc mode
            intensity = 100
            for i in range(strip.numPixels()):
                # calculates a faded arc from low to maximum brightness
                h = _get_color_value(i, hour_value, intensity=intensity)
                m = _get_color_value(i, minute_value, intensity=intensity)
                red, green, blue = 0, m, h
                color = Color(red, green, blue)
                strip.setPixelColor(i % 24, color)
            strip.show()
            time.sleep(0.1)
        except KeyboardInterrupt:
            # BUG FIX: Logger.warn() is deprecated; use warning().
            LOGGER.warning("KeyboardInterrupt.")
            exit()
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            exit()
    clear(strip)
def doExecute(self):
    """Install and secure MariaDB on the remote host via SSH.

    Installs the packages, tunes /etc/my.cnf (charset, connections,
    InnoDB settings), enables and starts the service, then drives
    mysql_secure_installation non-interactively with piped answers.
    """
    LOGGER.debug(
        "=================================== install mariadb(%s) ==================================="
        % self.host["hostname"])
    commands = ["yum install -y mariadb mariadb-server python2-PyMySQL"]
    # Each sed appends a config line after the '^symbolic' anchor in my.cnf.
    commands.append(
        "sed -i '/^symbolic/acharacter-set-server = utf8' /etc/my.cnf")
    commands.append(
        "sed -i '/^symbolic/acollation-server = utf8_general_ci' /etc/my.cnf"
    )
    commands.append(
        "sed -i '/^symbolic/amax_connections = 4096' /etc/my.cnf")
    commands.append(
        "sed -i '/^symbolic/ainnodb_file_per_table = on' /etc/my.cnf")
    commands.append(
        "sed -i '/^symbolic/adefault-storage_engine = innodb' /etc/my.cnf")
    commands.append("systemctl enable mariadb")
    commands.append("systemctl start mariadb")
    # Scripted answers for mysql_secure_installation: empty root password,
    # then n/y/y/y/y to its prompts.
    secure_command = '''(echo ""
sleep 1
echo "n"
sleep 1
echo "y"
sleep 1
echo "y"
sleep 1
echo "y"
sleep 1
echo "y") | mysql_secure_installation'''
    commands.append(secure_command)
    utils.ssh_execute(commands, self.host)
def client_handshake_cb(self, gateway_ip, interface_ip):
    """Handle a successful VPN handshake: configure the host network,
    open the TUN/TAP device and initialise the traffic filter.

    :param gateway_ip: iterable of gateway IPv4 octets
    :param interface_ip: iterable of local interface IPv4 octets
    """
    LOGGER.debug("MainControl client_handshake_cb")
    if self.connect_cb is not None:
        self.connect_cb()
    ipv4_addr = list(interface_ip)
    ipv4_gateway = list(gateway_ip)
    # Fixed 10.0.0.0/24 VPN network.
    ipv4_network = [10, 0, 0, 0]
    ipv4_netmask = [255, 255, 255, 0]
    LOGGER.info("MainControl handshake success with interface ip: %s, gateway ip: %s" % (ipv4_addr, ipv4_gateway))
    self.sys_hper.init_network(self.server_ip, ipv4_addr, ipv4_gateway, ipv4_network, ipv4_netmask)
    self.tuntap = open_tun_tap(ipv4_addr, ipv4_network, ipv4_netmask)
    # filter
    ffilter = load_filter()
    # Default to a blacklist with no entries when no filter is stored.
    filter_type = FILTER_BLACK
    filter_domains = []
    filter_ips = []
    if ffilter is not None:
        ftype = ffilter.get('type')
        domains = ffilter.get('domains')
        ips = ffilter.get('ips')
        if ftype == 'Blacklist':
            filter_type = FILTER_BLACK
        elif ftype == 'Whitelist':
            filter_type = FILTER_WHITE
        # Stored as newline-separated blobs; split into lists.
        filter_domains = domains.strip().split('\n')
        filter_ips = ips.strip().split('\n')
    LOGGER.info("MainControl filter domains:\n%s\nfilter ips:\n%s" % (filter_domains, filter_ips))
    self.filter.init_filter(filter_type, filter_domains, filter_ips)
def handle_traffic(self):
    """Accumulate per-second rx/tx rates and periodically persist totals.

    Runs until self.running is cleared; saves totals every
    TRAFFIC_SAVE_INTERVAL ticks and once more on shutdown.
    """
    LOGGER.debug("Client handle_traffic")

    def save_totals():
        # Persist the running totals (deduplicated from two call sites).
        save_traffic({'rx': self.rx_total, 'tx': self.tx_total})

    tick = 0
    while self.running:
        # Promote the counters collected over the last second to the
        # current rates, fold them into the totals, then reset them.
        self.rx_rate = self.rx_tmp
        self.tx_rate = self.tx_tmp
        self.rx_total += self.rx_tmp
        self.tx_total += self.tx_tmp
        self.rx_tmp = 0
        self.tx_tmp = 0
        if tick % TRAFFIC_SAVE_INTERVAL == 0:
            save_totals()
        time.sleep(1)
        tick += 1
    # save on stop
    save_totals()
def org_list(self):
    """Return the (short) list of known orgs: guid and name only."""
    LOGGER.debug("Retrieve (short) org list")
    return self._cf_db.select(CFOrganizations, ['guid', 'name'])
def rename(self, path, path1):
    """Rename *path* to *path1* by copying buffers/files and deleting the
    source.

    :return: -errno.EIO if the backing write fails; None otherwise.
        Several combinations are not implemented yet (see TODOs).
    """
    # Rename is handled by copying and deleting files...
    LOGGER.debug("rename %s %s" % (path, path1))
    d = self.get_dir(path)
    if self.is_valid_file(path) and d.is_file(path):
        if not self.is_valid_file(path1):
            # from a valid file to an editor file
            buf = self.get_file_buf(path1)
            buf.write(d.read_file(path))
            # TODO : remove path ?
        else:
            # from a valid file to a valid file
            # if rename is defined
            # TODO : with unlink method defined in fs
            pass
    elif not self.is_valid_file(path):
        if self.is_valid_file(path1) and d.is_file(path1):
            # from an editor file to a valid file
            buf = self.get_file_buf(path)
            ret = d.write_to(path1, buf.getvalue())
            self.open_mode = None
            self.remove_file_buf(path)
            if ret == False:
                return -errno.EIO
        else:
            # from an editor file to an editor file
            # BUG FIX: this case lived in a duplicate `elif not
            # self.is_valid_file(path)` branch that could never run.
            # TODO
            pass
def log_response_details(self, response):
    """Verbose log responses"""
    req = response.request
    message = (
        " {0} <{1}> response from <{2}> with headers:<{3}> and body:<{4}>"
    ).format(req.method, response.status_code, response.url,
             response.headers, response.text)
    LOGGER.debug(message)
def unlink(self, path):
    """Remove the article file backing *path*.

    :return: True if the file existed and was removed, False otherwise
    """
    LOGGER.debug("FSdir unlink %s" % (path))
    file_name = self.get_article_file_name(path)
    # BUG FIX: dict.has_key() was removed in Python 3; `in` works in both.
    if file_name in self.files:
        self.files.pop(file_name)
        return True  # succeeded
    else:
        return False
def clear_traffic(self):
    """Reset the rx/tx totals and persist the zeroed counters."""
    LOGGER.debug("Client clear_traffic")
    self.rx_total = 0
    self.tx_total = 0
    save_traffic({'rx': self.rx_total, 'tx': self.tx_total})
def receive(self):
    """Read one 28-byte frame from the socket into self.trameUsed.

    Anything that is not exactly 28 bytes is silently discarded.
    """
    message = self.soc.recv(1024)
    if not message or len(message) != 28:
        return
    LOGGER.debug("trame reçu : {}".format(message))
    self.trameUsed = Trame.trame(message)
def utime(self, path, times):
    """Set access/modification times by delegating to the backing dir.

    :return: the delegate's result, or -errno.ENOSYS when unsupported
    """
    LOGGER.debug("utime %s %s" % (path, times))
    d = self.get_dir(path)
    # hasattr() is the idiomatic feature test; the original scanned
    # dir(d) for the method name.
    if not hasattr(d, "utime"):
        return -errno.ENOSYS  # Not implemented
    return d.utime(path, times)
def utime(self, path, times):
    """Set access/modification times by delegating to the backing dir.

    :return: the delegate's result, or -errno.ENOSYS when unsupported
    """
    LOGGER.debug("utime %s %s" % (path, times))
    d = self.get_dir(path)
    # hasattr() is the idiomatic feature test; the original scanned
    # dir(d) for the method name.
    if not hasattr(d, "utime"):
        return -errno.ENOSYS  # Not implemented
    return d.utime(path, times)
def read(self, path, size, offset):
    """Read *size* bytes at *offset* from the buffer backing *path*."""
    LOGGER.debug("read %s %d %d" % (path, size, offset))
    self.open_mode = self.READ
    handle = self.get_file_buf(path)
    handle.seek(offset)
    return handle.read(size)
def open(self, path, flags):
    """Open *path*: on first open of a valid file, populate its buffer
    from the backing directory."""
    LOGGER.debug("open %s %d" % (path, flags))
    # BUG FIX: dict.has_key() was removed in Python 3; `in` works in both.
    if path not in self.files:
        if self.is_valid_file(path):
            buf = self.get_file_buf(path)
            d = self.get_dir(path)
            txt = d.read_file(path)
            buf.write(txt)
def rmdir(self, path):
    """Remove a directory by delegating to the backing dir.

    :return: -errno.EACCES when unsupported or when the delegate fails
    """
    LOGGER.debug("rmdir %s" % path)
    d = self.get_dir(path)
    # hasattr() replaces the original dir(d).count(...) feature scan.
    if not hasattr(d, "rmdir"):
        return -errno.EACCES  # Permission denied
    res = d.rmdir(path)
    if res != True:
        return -errno.EACCES  # Permission denied
def mkdir(self, path, mode):
    """Create a directory by delegating to the backing dir.

    NOTE(review): *mode* is logged but not forwarded to d.mkdir() —
    confirm whether the delegate should honour it.

    :return: -errno.EACCES when unsupported or when the delegate fails
    """
    LOGGER.debug("mkdir %s %x" % (path, mode))
    d = self.get_dir(path)
    # hasattr() replaces the original dir(d).count(...) feature scan.
    if not hasattr(d, "mkdir"):
        return -errno.EACCES  # Permission denied
    res = d.mkdir(path)
    if res != True:
        return -errno.EACCES  # Permission denied
def write(self, path, txt, offset):
    """Write *txt* into the buffer for *path* at *offset*.

    :return: number of bytes written
    """
    LOGGER.debug("write %s [...] %d" % (path, offset))
    self.open_mode = self.WRITE
    handle = self.get_file_buf(path)
    handle.seek(offset)
    handle.write(txt)
    return len(txt)
def get_issues(self, issues, limit=300):
    """Fetch the given Jira issues via a JQL key query.

    :param issues: iterable of issue keys
    :param limit: maximum number of issues to request
    :return: list of parsed JiraIssue objects
    """
    keys = ','.join(issues)
    request = 'project=%s AND key in (%s)' % (self.settings.project, keys)
    LOGGER.debug(request)
    raw_items = self.proxy.getIssuesFromJqlSearch(self.get_token(), request,
                                                  Types.intType(limit))
    result = []
    for raw in raw_items:
        parsed = JiraIssue()
        parsed.parse_raw(raw)
        result.append(parsed)
    return result
def create(self, path, mode, dev):
    """Create a file that does not exist yet at *path*.

    :return: -errno.EACCES when the path is rejected by the fs
    """
    # create is called to write a file that does not exist yet
    LOGGER.debug("create %s %d %d" % (path, mode, dev))
    if not self.is_valid_file(path):
        return -errno.EACCES  # Permission denied
    d = self.get_dir(path)
    # We also need to check if it is a valid file for the fs
    if dir(d).count("is_valid_file") == 1 and not d.is_valid_file(path):
        return -errno.EACCES  # Permission denied
    # Allocate the in-memory buffer for the new file.
    self.get_file_buf(path)
def unlink(self, path):
    """Remove *path*: drop its buffer and delegate deletion to the
    backing dir when the path is valid.

    :return: -errno.EACCES when unsupported or when the delegate fails
    """
    LOGGER.debug("unlink %s" % path)
    d = self.get_dir(path)
    self.remove_file_buf(path)
    if self.is_valid_file(path):
        # hasattr() replaces the original dir(d).count(...) feature scan.
        if not hasattr(d, "unlink"):
            return -errno.EACCES  # Permission denied
        res = d.unlink(path)
        if res != True:
            return -errno.EACCES  # Permission denied
def truncate(self, path, size):
    """Truncate the buffer for *path* to *size* bytes, seeding it from
    the backing file first when the path is valid."""
    # Truncate is called just before open when a file is to be written
    # in order to make it empty
    LOGGER.debug("truncate %s %d" % (path, size))
    buf = self.get_file_buf(path)
    if self.is_valid_file(path):
        backing = self.get_dir(path)
        buf.write(backing.read_file(path))
    buf.truncate(size)
def gimmeTrame(self, daNewState):
    """ Return the update trame to be sent """
    if daNewState == "close":
        data = "00000009"
    elif daNewState == "open":
        data = "00000008"
    else:
        # Logger.warn() is deprecated; use warning().
        LOGGER.warning("Strange state : {}. Trame not sent".format(daNewState))
        return ""
    # BUG FIX: 'elf.trameStart' was a typo for 'self.trameStart' and
    # raised NameError at runtime.
    strTrame = self.trameStart + data + self.physic_id + self.trameEnd
    myTrame = Trame.trame(strTrame)
    myTrame.calculateChecksum()
    LOGGER.debug("Trame returned : {}".format(myTrame.rawView()))
    return myTrame.rawView()
def get_outdated_issues():
    """Collect issues whose estimated date has arrived or passed, sorted
    by that date; issues strictly past their date are flagged outdated."""
    now = datetime.now().date()
    result = []
    for issue in DatabaseWrapper.get_all_issues():
        raw_date = issue.get_custom_field('est_date')
        if raw_date == '':
            continue
        try:
            est_date = datetime.strptime(raw_date, '%d/%b/%y').date()
            if est_date <= now and issue.status in ('in progress', 'defined'):
                issue.est_date = est_date
                issue.team = issue.get_custom_field('team')
                issue.points = issue.get_custom_field('points')
                if est_date < now:
                    issue.outdated = True
                result.append(issue)
        except ValueError:
            LOGGER.debug(
                'Unexpected date format for issue %s: %s' %
                (issue.key, issue.get_custom_field('est_date')))
    result.sort(key=lambda entry: entry.est_date)
    return result
def readdir(self, path, offset):
    """Yield directory entries for *path*, always including '.' and '..'."""
    LOGGER.debug("readdir %s %d" % (path, offset))
    # The root is looked up as-is; other paths need a trailing slash.
    lookup = path if path == "/" else path + "/"
    d = self.get_dir(lookup)
    entries = d.contents(path)
    if entries is None:
        entries = []
    for dot in ('.', '..'):
        if dot not in entries:
            entries.append(dot)
    for name in entries:
        yield fuse.Direntry(name)
def release(self, path, flags):
    """Close *path*: flush written data to the backing dir and drop the
    in-memory buffer."""
    # Called to close the file
    LOGGER.debug("release %s %x" % (path, flags))
    # Release can not return errors, but try anyhow because we have no other choices.
    # XXX: Is the flush called reliably enough to do this there?
    if self.open_mode == self.WRITE and self.is_valid_file(path):
        # for valid files
        buf = self.get_file_buf(path)
        d = self.get_dir(path)
        success = d.write_to(path, buf.getvalue())
        LOGGER.debug("release: success: %d\n" % (success))
    if self.is_valid_file(path):
        # Do not keep buffer in memory...
        self.remove_file_buf(path)
    self.open_mode = None
    return None
def getattr(self, path):
    """Build a stat structure for *path* (open buffer, directory, or
    backing file).

    :return: a MetaDir.Stat, or -errno.ENOENT when the path is unknown
    """
    LOGGER.debug("getattr %s" % path)
    d = self.get_dir(path)
    st = MetaDir.Stat()
    # BUG FIX: dict.has_key() and the bare 0666 octal literal are
    # Python-2-only syntax; use `in` and 0o666.
    if path in self.files:
        # Currently-open editor buffer: world-writable regular file.
        st.st_mode = stat.S_IFREG | 0o666
        st.st_nlink = 1
        st.st_size = 0
    elif not self.is_valid_file(path):
        return -errno.ENOENT  # No such file or directory
    elif d.is_directory(path):
        st.st_mode = stat.S_IFDIR | d.mode(path)
        st.st_nlink = 2
    elif d.is_file(path):
        st.st_mode = stat.S_IFREG | d.mode(path)
        st.st_nlink = 1
        st.st_size = d.size(path)
        st.st_mtime = d.mtime(path)
    else:
        return -errno.ENOENT  # No such file or directory
    return st
def flush(self, path, flags=0):
    """Flush written data for *path* to the backing dir.

    :return: -errno.EIO when the backing write fails, else None
    """
    # Did we succeed?
    success = True
    # Called to close the file
    LOGGER.debug("flush %s %x" % (path, flags))
    if self.open_mode == self.WRITE and self.is_valid_file(path):
        # for valid files
        buf = self.get_file_buf(path)
        d = self.get_dir(path)
        success = d.write_to(path, buf.getvalue())
        LOGGER.debug("flush: success: %d\n" % (success))
    if success == False:
        # BUG FIX: the original format string had no %d placeholder, so
        # this logging call itself raised TypeError on the failure path.
        LOGGER.debug("flush: Returning %d\n" % (-errno.EIO))
        return -errno.EIO
    return None
def size(self, path):
    """Return the byte length of the article behind *path*."""
    LOGGER.debug("FSdir size %s" % (path))
    content = self.read_file(path)
    return len(content)
def mode(self, path):
    """Return the fixed permission bits (rwxr-xr-x) for *path*.

    BUG FIX: the bare octal literal 0755 is Python-2-only syntax and a
    SyntaxError under Python 3; 0o755 is the same value in both.
    """
    LOGGER.debug("FSdir mode %s" % (path))
    return 0o755
def mkdir(self, path):
    """Register *path* as a directory and mount a fresh ArticleDir there."""
    LOGGER.debug("FSdir mkdir %s" % (path))
    self.dirs[self.get_article_file_name(path)] = True
    self.fs.set_dir(path, ArticleDir(self.fs, self.config))
    return True
def update(self, absX, absY):
    """Build and checksum the position-update frame for the given
    absolute coordinates, then log it.

    NOTE(review): the frame is only logged here — confirm whether it
    should also be transmitted.
    """
    coords = self.translateCoord(absX, absY)
    frame_str = (self.start + coords.get('x') + coords.get('y') +
                 self.ident + self.end)
    frame = Trame.trame(frame_str)
    frame.calculateChecksum()
    LOGGER.debug("Frame to be send : {}".format(frame.lessRawView()))
def chown(self, path, user, group):
    """Ownership changes are not supported; log the request and ignore it."""
    LOGGER.debug("chown %s %s %s" % (path, user, group))
    return None
def chmod(self, path, mode):
    """Permission changes are not supported; log the request and ignore it."""
    LOGGER.debug("chmod %s %s" % (path, mode))
    return None
def mknod(self, path, mode, dev):
    """Log a filesystem-node creation request; creation itself is
    handled elsewhere (see create())."""
    # Creates a filesystem node
    LOGGER.debug("mknod %s %d %s" % (path, mode, dev))