def testLogisticError(): k = 5 data = Data(k, 0, 0) data.importDataFromMat() data.normalize() lg = LogisticLinearClassifier(0.03, 0.03, 576, k, data) err_train, miss_train, err_val, miss_val = lg.train(30) mis_fig = plt.figure() ax2 = mis_fig.add_subplot(111) ax2.plot(err_val, label="error (validation)") ax2.plot(err_train, label="error (training)") title = "std(val)=%f std(err)=%f" % (sp.std(err_val), sp.std(err_train)) mis_fig.suptitle(title) ax2.set_ylabel("error") ax2.set_xlabel("epoch") plt.legend() mis_fig = plt.figure() ax2 = mis_fig.add_subplot(111) ax2.plot(miss_val, label="misclassification ratio (validation)") ax2.plot(miss_train, label="misclassification ratio (training)") mis_fig.suptitle(title) ax2.set_ylabel("misclassification ratio") ax2.set_xlabel("epoch") plt.legend() results, cat = lg.classify(data.test_left, data.test_right) lg.confusion_matrix(cat, data.test_cat.argmax(axis=0)) err = Error() err, misclass = err.norm_total_error(results.T, data.test_cat, k) print "Error on the test set " + str(err) print "Misclassification ratio on the test set " + str(misclass)
def suitable_pile_pop(self, bottomCard):
    """
    Return True if the sub-pile starting at ``bottomCard`` may legally be
    popped: each Card in the pile is face-up, the pile alternates in color,
    and the pile is built top down (descending rank). Additionally, all
    Cards in the given pile must be face-up.

    Prints a specific error for every rule the first offending Card breaks.
    """
    if bottomCard is None:
        Error.print_error(Error.CARD_IS_NONE, lineno())
        return False
    if bottomCard.is_face_down():
        Error.print_error(Error.CARD_IS_FACE_DOWN, lineno())
        return False
    idx = self.index(bottomCard)
    popped = self[idx:]
    evenColor = bottomCard.color()
    for i, c in enumerate(popped):
        # FIX: the original evaluated every condition twice (once to print
        # the error, once to decide the return) — evaluate each rule once.
        face_down = c.is_face_down()
        bad_rank = c.rank != bottomCard.rank - i
        # Even offsets must match bottomCard's color, odd offsets must differ.
        bad_color = (i % 2 == 0 and c.color() != evenColor
                     or i % 2 == 1 and c.color() == evenColor)
        if face_down:
            Error.print_error(Error.CARD_IS_FACE_DOWN, lineno())
        if bad_rank:
            Error.print_error(Error.INVALID_RANK, lineno())
        if bad_color:
            Error.print_error(Error.INVALID_COLOR, lineno())
        if face_down or bad_rank or bad_color:
            return False
    return True
def sort_into_dictionary(wedding_list, guest_list):
    '''sort_into_dictionary(str, dictionary_object)
    Takes a grouping of strings and sorts them into a python dictionary
    object. A python dictionary object is implemented using a hash function.
    This makes anything involving look up extremely fast. Much faster than
    using a list object.
    '''
    error = Error()
    for guest in wedding_list.split(","):
        # FIX: split each entry once instead of twice.
        parts = guest.split("-")
        # ensure that the name before the dash is not empty
        try:
            name = parts[0].strip().title()
            response = parts[1].strip("\n").lower()
        except IndexError:
            error.data_error()
        else:
            # check that responses in rsvp file is not empty
            if not response or not name:
                error.data_error()
            # adds the names and replies to the dictionary object. If the name
            # already exists meaning that there is another guest with same name
            # append their reply to that name
            guest_list.setdefault(name, []).append(response)
def test_gradient(self, xL, xR, cat):
    """
    Finite-difference sanity check of one backprop gradient entry.

    Perturbs weight w1l[0,8] by +/-epsilon, recomputes the total error with
    a forward pass each time, and compares the central-difference slope
    against the analytic gradient reported by backward_pass. The weight is
    restored to its original value afterwards.
    """
    epsilon = 10**(-8)
    a1L, a1R, a2L, a2LR, a2R, a3, z1Lb, z1LRb, z1Rb, z2b, xLb, xRb = self.forward_pass(xL, xR)
    grad3, grad2L, grad2LR, grad2R, grad1L, grad1R = self.backward_pass(a1L, a1R, a2L, a2LR, a2R, a3, z1Lb, z1LRb, z1Rb, z2b, xLb, xRb, cat)
    # Analytic gradient for the single probed weight.
    expected_gradient = grad1L[0,8]; e=Error()
    # Error at w + epsilon.
    self.w1l[0,8] += epsilon
    a1L, a1R, a2L, a2LR, a2R, a3, z1Lb, z1LRb, z1Rb, z2b, xLb, xRb = self.forward_pass(xL, xR)
    e_plus = e.total_error(a3,cat, self.k)[0]
    # Error at w - epsilon.
    self.w1l[0,8] -= (2*epsilon)
    a1L, a1R, a2L, a2LR, a2R, a3, z1Lb, z1LRb, z1Rb, z2b, xLb, xRb = self.forward_pass(xL, xR)
    e_minus = e.total_error(a3,cat, self.k)[0]
    # Restore the original weight.
    self.w1l[0,8] += epsilon
    # Central difference: (E(w+eps) - E(w-eps)) / (2*eps).
    difference = (e_plus-e_minus)
    approx_grad = difference/(2*epsilon)
    print "Derivative = "+str(approx_grad)
    print "Gradient = "+str(expected_gradient)
    print "Difference "+str(approx_grad-expected_gradient)
def suitable_card_pop(self):
    """
    Return True when this Pile has a card available to pop; print an
    error and return False when the pile is empty.
    """
    empty = self.is_empty()
    if empty:
        Error.print_error(Error.EMPTY_PILE, lineno())
    return not empty
def enqueue_card(self, card):
    """
    The same as enqueue(), but overwritten by Pile subclasses.
    Only legal for Stock piles.
    """
    if not self.suitable_card_enqueue(card):
        Error.print_error(Error.ILLEGAL_ENQUEUE_CARD, lineno())
        return None
    return self.enqueue(card)
def pop_card(self):
    """
    Equivalent to deque.pop(), but is overridden by Pile subclasses
    for additional functionality.
    """
    if not self.suitable_card_pop():
        Error.print_error(Error.ILLEGAL_POP_CARD, lineno())
        return None
    return self.pop()
def push_card(self, card):
    """
    The same as push(), but is overridden by Pile subclasses to check
    whether pushing the Card to this Pile is legal.
    """
    if not self.suitable_card_push(card):
        Error.print_error(Error.ILLEGAL_PUSH_CARD, lineno())
        return None
    return self.push(card)
def verify_fetch(self, array_pos):
    """Verifies that truncation or other problems did not take place on retrieve."""
    # Fixed-length columns cannot be truncated; nothing to check.
    if not self.type.is_variable_length:
        return
    return_code = self.return_code[array_pos]
    if return_code == 0:
        return
    # Non-zero return code: surface it as a database error.
    error = Error(self.environment, "Variable_VerifyFetch()", 0)
    error.code = return_code
    error.message = "column at array pos %d fetched with error: %d" % (array_pos, return_code)
    raise DatabaseError(error)
def suitable_pile_pop(self, bottomCard):
    """
    Return True when ``bottomCard`` exists and belongs to this Pile;
    otherwise print the applicable error(s) and return False.
    """
    if bottomCard is None:
        Error.print_error(Error.CARD_IS_NONE, lineno())
    # FIX: idiomatic `not in` instead of `not bottomCard in self`.
    if bottomCard not in self:
        Error.print_error(Error.CARD_NOT_IN_PILE, lineno())
    return bottomCard is not None and bottomCard in self
def push_pile(self, pile):
    """
    Pushes a list of Cards to the top of this Pile, if it is legal to do
    so. Overridden by subclasses for which pushing a Pile is legal.
    """
    if not self.suitable_pile_push(pile):
        Error.print_error(Error.ILLEGAL_PUSH_PILE, lineno())
        return None
    return self.extend(pile)
def __get_active_policies(self, repository_name):
    """Return the policy configurations scoped to the given repository."""
    Error.abort_if(repository_name not in self.settings['repo_id'],
                   "Couldn't find this repository on your TFS")
    repo_id = self.settings['repo_id'][repository_name]
    url = self.settings['url'] + '/' + self.settings['project'] + '/_apis/policy/configurations?api-version=2.0'
    all_policies = requests.get(url, auth=self.__get_auth()).json()['value']
    # Keep only the policies whose scope targets this repository.
    return [policy for policy in all_policies
            if policy["settings"]["scope"][0]["repositoryId"] == repo_id]
def check_for_error(self, status, context):
    """Raise a DatabaseError unless the OCI status indicates success."""
    if status in (oci.OCI_SUCCESS, oci.OCI_SUCCESS_WITH_INFO):
        return
    # For any failure other than an invalid handle, delegate to raise_error.
    if status != oci.OCI_INVALID_HANDLE:
        self.raise_error(context)
    # Invalid handle: build the error record by hand.
    error = Error(self, context, 0)
    error.code = 0
    error.message = "Invalid handle!"
    raise DatabaseError(error)
def cr(ctx):
    """Entry point: ensure TFS configuration exists, then attach the repository."""
    try:
        if not Configuration.exists():
            # First run: collect the TFS credentials interactively.
            Utils.print_encoded("Please inform your TFS' information\n")
            ctx.invoke(configure,
                       url=click.prompt("Url"),
                       username=click.prompt("Username"),
                       password=click.prompt("Password"))
            ctx.exit()
        repo = git.Repo('.')
        ctx.obj = Repository(repo, RepositoryUtils(repo), Tfs(Configuration.load()))
    except git.exc.InvalidGitRepositoryError:
        Error.abort("You're not on a valid git repository")
def card_at(self, idx):
    """
    Return the Card at the specified index in this Pile, if the index is
    valid (negative indices allowed). Otherwise, print an error message
    and return None.
    """
    if -len(self) <= idx < len(self):
        return self[idx]
    Error.print_error(Error.INVALID_INDEX, lineno())
def suitable_card_enqueue(self, card):
    """
    Decide whether ``card`` may be enqueued: the card must exist and this
    Pile must not already be full. Overridden by subclasses.
    """
    missing = card is None
    full = len(self) >= self.maxlen
    if missing:
        Error.print_error(Error.CARD_IS_NONE, lineno(), False)
    if full:
        Error.print_error(Error.EXCEEDS_MAXIMUM, lineno(), False)
    return not (missing or full)
def suitable_card_push(self, card):
    """
    A suitable card is a card whose value and suit fit them to be played
    or placed onto a pile. Overridden by subclasses.
    """
    missing = card is None
    full = len(self) >= self.maxlen
    if missing:
        Error.print_error(Error.CARD_IS_NONE, lineno())
    if full:
        Error.print_error(Error.EXCEEDS_MAXIMUM, lineno())
    return not (missing or full)
def suitable_pile_push(self, pile):
    """
    A suitable pile is a pile of cards that will be accepted upon push.
    Overridden by subclasses.

    Returns False (after printing an error) when the pile is missing or
    would overflow this Pile.
    """
    if pile is None:
        Error.print_error(Error.PILE_IS_NONE, lineno())
        # FIX: the original fell through and evaluated len(pile) on None,
        # raising TypeError instead of returning False.
        return False
    if len(self) + len(pile) >= self.maxlen:
        Error.print_error(Error.EXCEEDS_MAXIMUM, lineno())
    return len(self) + len(pile) < self.maxlen
def suitable_pile_enqueue(self, pile):
    """
    A pile is suitable if it can be legally enqueued to this Pile.
    Single-card piles are delegated to suitable_card_enqueue().
    """
    if pile is None:
        return Error.print_error(Error.PILE_IS_NONE, lineno(), False)
    if len(pile) == 1:
        # FIX: suitable_card_enqueue is a bound method; the original passed
        # `self` as an extra positional argument, raising TypeError.
        return self.suitable_card_enqueue(pile[0])
    if len(self) + len(pile) >= self.maxlen:
        Error.print_error(Error.EXCEEDS_MAXIMUM, lineno())
    # pile is known non-None here, so only the capacity check remains.
    return len(self) + len(pile) < self.maxlen
def index(self, card):
    """
    Return the index of ``card`` in this Pile when present; otherwise
    print an error message and return None.
    """
    if card is None:
        return Error.print_error(Error.CARD_IS_NONE, lineno())
    # Linear scan through the pile for a matching card.
    for position in range(len(self)):
        if self.card_at(position) == card:
            return position
    Error.print_error(Error.CARD_NOT_IN_PILE, lineno())
def is_complete(self):
    """
    Return True if this Pile contains all 13 Cards of its suit in
    ascending order; print an error for each violated rule.
    """
    for i in range(len(self)):
        card = self[i]
        # FIX: the original evaluated each condition twice (once to print,
        # once to decide the return) — evaluate each rule exactly once.
        bad_rank = card.rank != i + 1
        bad_suit = card.suit != self.suit
        if bad_rank:
            Error.print_error(Error.INVALID_RANK, lineno())
        if bad_suit:
            Error.print_error(Error.INVALID_SUIT, lineno())
        if bad_rank or bad_suit:
            return False
    # All cards in order; complete only when the pile is full (13 cards).
    return len(self) == self.maxlen
def validateCommands(commands):
    """Validate every command in every layer, reporting format errors."""
    for layer in commands:
        for command in layer:
            # test if the command is valid
            Tagger.getTagsForCommand(command)
            if re.match(SIZE, command):
                # test if size is a valid number (1..7)
                if int(command[SPLIT_SIZE:]) not in range(1, 8):
                    Error.formatError()
            elif re.match('color:[a-zA-Z0-9]+', command):
                # test if color is valid HEX
                hex_code = command.split(':')[-1]
                # NOTE(review): pattern is unanchored, so only the first six
                # characters are validated — confirm full-match was intended.
                if not re.match('[0-9a-fA-F]{6}', hex_code):
                    Error.formatError()
def approve_pull_request(self, repository_name, feature_name):
    """
    Complete the active pull request for ``feature_name`` on the given
    repository, temporarily deactivating branch policies so the merge can
    go through, and always restoring them afterwards.
    """
    Error.abort_if(repository_name not in self.settings['repo_id'], "Couldn't find this repository on your TFS")
    policies = self.__get_active_policies(repository_name)
    # Active policies (e.g. required reviewers) would block completion.
    self.__deactivate_policies(policies)
    try:
        self.__wait_for_merge_analysis(repository_name, feature_name)
        pull_request = self.__get_pull_requests(repository_name, feature_name, only_active=True)[0]
        Error.abort_if(pull_request['mergeStatus'] != 'succeeded', "Hotfix couldn't be pushed because conflicts were found")
        self.__delete_reviewers(pull_request)
        auth = self.__get_auth()
        fullUrl = self.settings['url'] + "/_apis/git/repositories/" + self.settings['repo_id'][repository_name] + "/pullrequests/" + str(pull_request['pullRequestId']) + "?api-version=2.0"
        # PATCH marks the PR completed at its last analysed merge commit.
        response = requests.patch(fullUrl, json={"status": "completed", "lastMergeSourceCommit": pull_request['lastMergeSourceCommit']}, auth=auth)
        self.__wait_for_merge_analysis(repository_name, feature_name)
    finally:
        # Restore the policies even when completion failed.
        self.__activate_policies(policies)
def suitable_pile_pop(self, bottomCard):
    """
    Piles of cards cannot be popped from a Foundation pile; only the
    top-most card qualifies.
    """
    if bottomCard != self.peek():
        return Error.print_error(Error.ILLEGAL_POP_PILE, lineno(), False)
    return True
def __init__(self, indicator=None):
    """Bind this object to its indicator and initialise download state."""
    self.indicator = indicator
    # NOTE(review): indicator defaults to None but is dereferenced on the
    # next line — confirm callers always pass a real indicator.
    self.downloader = indicator.coin.downloader
    self.timeout_id = None
    self.error = Error(self)
    self.started = False
    self.asset_pair = {}
def suitable_card_push(self, card):
    """
    Cards may not be pushed onto the Stock pile, with the exception of the
    initial deal. Only Cards from the Waste pile may be enqueued by the
    dealer to the bottom of the Stock pile.
    """
    # Always illegal: report the error (quietly via the False flag) and
    # return its result.
    return Error.print_error(Error.ILLEGAL_PUSH_CARD, lineno(), False)
def testSquaredError(): k = 5 data = Data(k, 0, 0) data.importDataFromMat() data.normalize() sq = SquaredErrorLinearClassifier(2 ** 10, k) sq.train(data.train_left, data.train_right, data.train_cat) results, cat = sq.classify(data.test_left, data.test_right) sq.confusion_matrix(cat, data.test_cat.argmax(axis=0)) err = Error() err, misclass = err.norm_total_error(results.T, data.test_cat, k) print "Error on the test set " + str(err) print "Misclassification ratio on the test set " + str(misclass)
def __init__(self):
    """Load the add-forum dialog UI and wire up its dialog buttons."""
    # NOTE(review): super(QWidget, self) skips QWidget itself in the MRO —
    # this is usually a typo for super(<ThisClass>, self); confirm intended.
    super(QWidget, self).__init__()
    uic.loadUi('ui/add_forum.ui', self)
    # Shared database connection cursor.
    self.cursor = con.cursor()
    self.add_forum_btb.accepted.connect(self.insert_new)
    self.add_forum_btb.rejected.connect(self.close)
    self.error = Error()
def suitable_pile_push(self, pile):
    """
    Piles of cards cannot be pushed to a Foundation pile; a single-card
    pile is delegated to suitable_card_push().
    """
    if len(pile) == 1:
        return self.suitable_card_push(pile[0])
    # FIX: the original omitted the lineno() argument, passing False where
    # the line number belongs (cf. the sibling print_error calls).
    return Error.print_error(Error.ILLEGAL_PUSH_PILE, lineno(), False)
class Dictionary:
    """
    Dictionary feature group:
      * English-Vietnamese lookup
      * Vietnamese-English lookup
    """
    def __init__(self):
        # Connect to Cassandra servers
        client = connect(cassandra_hosts)
        self.d = ColumnFamily(client, cassandra_keyspace, 'Dictionary', super=True)
        self.u = ColumnFamily(client, cassandra_keyspace, 'Users', super=True)
        self.e = Error()

    def _lookup(self, keyword, dict_type='en_vi'):
        """Return the entry super-column for ``keyword``, or None when absent."""
        try:
            return self.d.get(dict_type, super_column=str(keyword))
        except (NotFoundException, InvalidRequestException):
            return None

    def lookup(self, environ):
        """Authenticated keyword lookup; returns XML result fragments or an error."""
        # A session_id is required for authentication.
        try:
            session_id = environ['request']['session_id']
        except KeyError:
            return self.e.authen_error("Thiếu session_id")
        # The session must exist in the Users column family.
        try:
            self.u.get('session_id', super_column=session_id)
        except (NotFoundException, InvalidRequestException):
            return self.e.authen_error()
        # Query all three dictionary directions.
        result = self._lookup(environ['request']['keyword'])
        result2 = self._lookup(environ['request']['keyword'], 'vi_en')
        result3 = self._lookup(environ['request']['keyword'], 'en_en')
        if (result is None) and (result2 is None) and (result3 is None):
            return self.e.not_found("Từ khóa bạn tìm không có trong từ điển")
        xml = []
        if result is not None:
            xml.append('<result type="en_vi" keyword="%s" mean="%s" spell="%s" status_code="200"/>' \
                % (xml_format(environ['request']['keyword']), xml_format(result['nghia']), xml_format(result['phien_am_quoc_te'])))
        if result2 is not None:
            xml.append('<result type="vi_en" keyword="%s" mean="%s" spell="" status_code="200"/>' \
                % (xml_format(environ['request']['keyword']), xml_format(result2['nghia'])))
        # NOTE(review): result3 (en_en) is fetched but never rendered —
        # confirm whether an en_en <result> element was intended here.
        return '\n\n'.join(xml)

    def total_words(self, dict_type='en_vi'):
        """Return the number of entries in the given dictionary direction."""
        return self.d.get_count(dict_type)
def localVarName(value, position):
    """Checks a local variable name: it must start with a lower case letter."""
    # FIX: docstring previously said "A name of a class" (copy-paste error).
    if not value[0].islower():
        return Error('BadLocalVariableName', 'Local variable must start with a lower case letter', position, LINES)
    return None
def ivarName(value, position):
    """Checks an instance variable name: '_' followed by a lower case letter."""
    # FIX: docstring previously said "A name of a class" (copy-paste error).
    if not value[0] == '_' or not value[1].islower():
        return Error(
            'BadInstanceVariableName',
            'Instance variable names start with _ and not be capitalized',
            position, LINES)
    return None
def validate_with_humanized_errors(
        data, schema, max_sub_error_length=MAX_VALIDATION_ERROR_ITEM_LENGTH):
    """Apply ``schema`` to ``data``; re-raise voluptuous errors in humanized form."""
    try:
        validated = schema(data)
    except (Invalid, MultipleInvalid) as exc:
        raise Error(humanize_error(data, exc, max_sub_error_length))
    return validated
def initialize(self, base_instructions):
    """Reset the machine: fresh 20-cell memory, error channel, internal state and I/O."""
    self.memory = Memory(20)
    self.error = Error()
    self.internal_state = Internal_State()
    self.io = IO()
    # Program to execute once the machine starts.
    self.base_instructions = base_instructions
import rospy from utils.geometry import Vector2D from utils.config import * from krssg_ssl_msgs.srv import path_plan from krssg_ssl_msgs.msg import point_2d from profiler_w import * from pid import pid from pso import PSO from error import Error v = None kubid = None expectedTraverseTime = None pso = None errorInfo = Error() REPLAN = 0 FIRST_CALL = 1 homePos = None expectedTraverseTime = None def Get_Omega(start, t, kub_id, totalAngle, homePos_): global FIRST_CALL, expectedTraverseTime, v, homePos, kubid REPLAN = 0 homePos = homePos_ kubid = kub_id currAngle = homePos[kub_id].theta kubid = kub_id if FIRST_CALL:
def start(self, start_index=None):
    """
    Starts pyrrent2http client with specified settings. If it can be started in startup_timeout seconds,
    exception will be raised.
    :param start_index: File index to start download instantly, if not specified, downloading will be paused,
    until any file requested
    """
    download_path = self._validate_save_path(self.download_path)
    # Fall back to a free port when the configured one cannot be bound.
    if not can_bind(self.bind_host, self.bind_port):
        port = find_free_port(self.bind_host)
        if port is False:
            raise Error("Can't find port to bind pyrrent2http", Error.BIND_ERROR)
        self._log("Can't bind to %s:%s, so we found another port: %d" % (self.bind_host, self.bind_port, port))
        self.bind_port = port
    # Keyword configuration handed straight to Pyrrent2http(**kwargs).
    kwargs = {
        'torrentConnectBoost': self.torrent_connect_boost,
        'trackers': ",".join(self.trackers),
        'resumeFile': self.resume_file,
        'minReconnectTime': self.min_reconnect_time,
        'enableUPNP': self.enable_upnp,
        'showAllStats': self.log_stats,
        'debugAlerts': self.debug_alerts,
        'keepComplete': self.keep_complete,
        'dhtRouters': ",".join(self.dht_routers),
        'userAgent': self.user_agent,
        'enableLSD': self.enable_lsd,
        'uri': self.uri,
        'randomPort': self.use_random_port,
        'noSparseFile': self.no_sparse,
        'maxUploadRate': self.upload_kbps,
        'downloadPath': download_path,
        'showOverallProgress': self.log_overall_progress,
        'enableDHT': self.enable_dht,
        'showFilesProgress': self.log_files_progress,
        'requestTimeout': self.request_timeout,
        'bindAddress': "%s:%s" % (self.bind_host, self.bind_port),
        'maxDownloadRate': self.download_kbps,
        'connectionSpeed': self.connection_speed,
        'keepIncomplete': self.keep_incomplete,
        'enableTCP': self.enable_tcp,
        'listenPort': self.listen_port,
        'keepFiles': self.keep_files,
        'stateFile': self.state_file,
        'peerConnectTimeout': self.peer_connect_timeout,
        'maxFailCount': self.max_failcount,
        'showPiecesProgress': self.log_pieces_progress,
        'idleTimeout': self.max_idle_timeout,
        #'fileIndex': start_index,
        'connectionsLimit': self.connections_limit,
        'enableScrape': self.enable_scrape,
        'enableUTP': self.enable_utp,
        'encryption': self.encryption,
        'enableNATPMP': self.enable_natpmp
    }
    self._log("Invoking pyrrent2http")

    # Adapter so the pyrrent2http module logs through our _log callback.
    class Logging(object):
        def __init__(self, _log):
            self._log = _log

        def info(self, message):
            if LOGGING:
                self._log('INFO: %s' % (message,))

        def error(self, message):
            if LOGGING:
                self._log('ERROR: %s' % (message,))
    pyrrent2http.logging = Logging(self._log)
    self.pyrrent2http = pyrrent2http.Pyrrent2http(**kwargs)
    self.pyrrent2http.startSession()
    self.pyrrent2http.startServices()
    self.pyrrent2http.addTorrent()
    self.pyrrent2http.startHTTP()
    # The client's main loop runs on a background thread.
    self.pyrrent2http_loop = threading.Thread(target = self.pyrrent2http.loop)
    self.pyrrent2http_loop.start()
    start = time.time()
    self.started = True
    initialized = False
    # Poll until the client is up or startup_timeout elapses.
    while (time.time() - start) < self.startup_timeout:
        time.sleep(0.1)
        if not self.is_alive():
            raise Error("Can't start pyrrent2http, see log for details", Error.PROCESS_ERROR)
        try:
            # NOTE(review): the status probe is commented out, so the first
            # surviving iteration marks the engine initialized — confirm.
            #self.status(1)
            initialized = True
            break
        except Error:
            pass
    if not initialized:
        self.started = False
        raise Error("Can't start pyrrent2http, time is out", Error.TIMEOUT)
    self._log("pyrrent2http successfully started.")
def start(self, start_index=None):
    """
    Starts torrent2http client with specified settings. If it can be started in startup_timeout seconds,
    exception will be raised.
    :param start_index: File index to start download instantly, if not specified, downloading will be paused,
    until any file requested
    """
    self.platform = self.platform or Platform()
    binary_path = self._get_binary_path(self.binaries_path)
    download_path = self._validate_save_path(self.download_path)
    # Fall back to a free port when the configured one cannot be bound.
    if not can_bind(self.bind_host, self.bind_port):
        port = find_free_port(self.bind_host)
        if port is False:
            raise Error("Can't find port to bind torrent2http", Error.BIND_ERROR)
        self._log("Can't bind to %s:%s, so we found another port: %d" % (self.bind_host, self.bind_port, port))
        self.bind_port = port
    # Command-line flags for the torrent2http binary (None values skipped below).
    kwargs = {
        '--bind': "%s:%s" % (self.bind_host, self.bind_port),
        '--uri': self.uri,
        '--file-index': start_index,
        '--dl-path': download_path,
        '--connections-limit': self.connections_limit,
        '--dl-rate': self.download_kbps,
        '--ul-rate': self.upload_kbps,
        '--enable-dht': self.enable_dht,
        '--enable-lsd': self.enable_lsd,
        '--enable-natpmp': self.enable_natpmp,
        '--enable-upnp': self.enable_upnp,
        '--enable-scrape': self.enable_scrape,
        '--encryption': self.encryption,
        '--show-stats': self.log_stats,
        '--files-progress': self.log_files_progress,
        '--overall-progress': self.log_overall_progress,
        '--pieces-progress': self.log_pieces_progress,
        '--listen-port': self.listen_port,
        '--random-port': self.use_random_port,
        '--keep-complete': self.keep_complete,
        '--keep-incomplete': self.keep_incomplete,
        '--keep-files': self.keep_files,
        '--max-idle': self.max_idle_timeout,
        '--no-sparse': self.no_sparse,
        '--resume-file': self.resume_file,
        '--user-agent': self.user_agent,
        '--state-file': self.state_file,
        '--enable-utp': self.enable_utp,
        '--enable-tcp': self.enable_tcp,
        '--debug-alerts': self.debug_alerts,
        '--torrent-connect-boost': self.torrent_connect_boost,
        '--connection-speed': self.connection_speed,
        '--peer-connect-timeout': self.peer_connect_timeout,
        '--request-timeout': self.request_timeout,
        '--min-reconnect-time': self.min_reconnect_time,
        '--max-failcount': self.max_failcount,
        '--dht-routers': ",".join(self.dht_routers),
        '--trackers': ",".join(self.trackers),
    }
    args = [binary_path]
    # Render flags as --key=value; booleans become explicit true/false.
    for k, v in kwargs.iteritems():
        if v is not None:
            if isinstance(v, bool):
                if v:
                    #args.append(k)
                    args.append("%s=true" % k)
                else:
                    args.append("%s=false" % k)
            else:
                #args.append(k)
                if isinstance(v, str) or isinstance(v, unicode):
                    v = ensure_fs_encoding(v)
                else:
                    v = str(v)
                #args.append(v)
                args.append("%s=%s" % (k, v))
    self._log("Invoking %s" % " ".join(args))
    startupinfo = None
    if self.platform.system == "windows":
        # Hide the child's console window on Windows.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= 1
        startupinfo.wShowWindow = 0
    # Child stdout/stderr are piped into our logger.
    self.logpipe = logpipe.LogPipe(self._log)
    try:
        self.process = subprocess.Popen(args, stderr=self.logpipe, stdout=self.logpipe, startupinfo=startupinfo)
    except OSError, e:
        raise Error("Can't start torrent2http: %r" % e, Error.POPEN_ERROR)
def get_metrics_as_csv():
    """
    Return the requested bin metric over a time window as a CSV attachment.

    **GET Request Parameters**
    > `metric`
    > `start_timestamp`
    > `end_timestamp`
    """
    try:
        # get request parameters
        metric = request.args.get("metric")
        start_timestamp = request.args.get("start_timestamp")
        end_timestamp = request.args.get("end_timestamp")
        # throw error if any required parameters are None
        if None in [metric, start_timestamp, end_timestamp]:
            raise Error("NULL_VALUE")
        # generate all function queries; note `.all` is stored as a bound
        # method (not called) so only the selected query executes later
        fullness_query = models.BinFullness.query.filter(
            and_(
                start_timestamp <= models.BinFullness.datetimestamp,
                end_timestamp >= models.BinFullness.datetimestamp,
            )).all
        weight_query = models.BinWeight.query.filter(
            and_(
                start_timestamp <= models.BinWeight.datetimestamp,
                end_timestamp >= models.BinWeight.datetimestamp,
            )).all
        usage_query = models.BinUsage.query.filter(
            and_(
                start_timestamp <= models.BinUsage.datetimestamp,
                end_timestamp >= models.BinUsage.datetimestamp,
            )).all
        # generate dict of functions for querying specific data metric
        data_query_func = {
            "fullness": fullness_query,
            "weight": weight_query,
            "usage": usage_query,
        }
        # check if parameters are valid
        if metric not in data_query_func.keys():
            raise Error("INVALID_PARAM")
        # throw error if start_timestamp > end_timestamp
        # NOTE(review): this compares the raw strings lexicographically —
        # confirm timestamps arrive in a sortable (ISO 8601) format.
        if start_timestamp > end_timestamp:
            raise Error("TIMESTAMP_ISSUE")
        # call query function
        query_data = data_query_func[metric]()
        # iterate through each row of data
        obj_dict = None
        for row in query_data:
            # convert fullness data into dictionary form such as {key1:str1, key2:str2}
            row_dict_form = dict(row)
            # assign obj_dict to appropriate keys based on model schema
            if not obj_dict:
                headers = row_dict_form.keys()
                # create a dictionary of empty lists such as {key1:[], key2:[]}
                obj_dict = dict(zip(headers, [[] for i in range(len(headers))]))
            # append data to the obj_dict based on keys
            for k in obj_dict.keys():
                obj_dict[k].append(row_dict_form[k])
        # create the pandas dataframe
        df = pandas.DataFrame(obj_dict)
        resp = make_response(df.to_csv(index=False))
        file_name = "{}_{}_{}.csv".format(metric, start_timestamp, end_timestamp)
        resp.headers[
            "Content-Disposition"] = "attachment; filename=" + file_name
        resp.headers["Content-Type"] = "text/csv"
        # return as CSV file
        return resp
    except Error as e:
        # Known application errors map to canned messages.
        return Error.em[str(e)]
    except Exception as e:
        return str(e), 400
def __init__(self, message='Cannot place tile there'):
    """Create the error with a default tile-placement message."""
    Error.__init__(self, message)
def __init__(self, message='Indices out of bound'):
    """Create the error with a default out-of-bounds message."""
    Error.__init__(self, message)
def innerPropertyVariable(value, position):
    """Checks an inner property name: it must begin with an underscore."""
    if not value[0].startswith("_"):
        return Error('BadInnerPropertyVariable', 'Inner property names must be underscored', position, LINES)
    return None
def propertyName(value, position):
    """Checks a property name."""
    if value[0].islower():
        return None
    return Error('BadPropertyName', 'Property names must not be capitalized', position, LINES)
def unexpectedHandler(kind, value, pos):
    """Handle a syntactically but not stylistically valid token."""
    description = 'Did not expect %r here' % value
    return Error(kind, description, pos, LINES)
def shouldBeNewline(result, pos):
    """Expect a newline here."""
    if isinstance(result, tuple):
        return None
    return Error('MissingNewline', 'Should have newline after ;', pos, LINES)
class Engine: """ This is python binding class to torrent2http client. """ SUBTITLES_FORMATS = ['.aqt', '.gsub', '.jss', '.sub', '.ttxt', '.pjs', '.psb', '.rt', '.smi', '.stl', '.ssf', '.srt', '.ssa', '.ass', '.usf', '.idx'] def _ensure_binary_executable(self, path): st = os.stat(path) if not st.st_mode & stat.S_IEXEC: self._log("%s is not executable, trying to change its mode..." % path) os.chmod(path, st.st_mode | stat.S_IEXEC) st = os.stat(path) if st.st_mode & stat.S_IEXEC: self._log("Succeeded") return True else: self._log("Failed") return False return True def _log(self, message): if self.logger: self.logger(message) else: xbmc.log("[torrent2http] %s" % message) def _get_binary_path(self, binaries_path): """ Detects platform and returns corresponding torrent2http binary path :param binaries_path: :return: torrent2http binary path """ binary = "torrent2http" + (".exe" if self.platform.system == 'windows' else "") binary_dir = os.path.join(binaries_path, "%s_%s" % (self.platform.system, self.platform.arch)) binary_path = os.path.join(binary_dir, binary) if not os.path.isfile(binary_path): raise Error("Can't find torrent2http binary for %s" % self.platform, Error.UNKNOWN_PLATFORM, platform=str(self.platform)) if not self._ensure_binary_executable(binary_path): if self.platform.system == "android": self._log("Trying to copy torrent2http to ext4, since the sdcard is noexec...") xbmc_home = os.environ.get('XBMC_HOME') or os.environ.get('KODI_HOME') if not xbmc_home: raise Error("Suppose we are running XBMC, but environment variable " "XBMC_HOME or KODI_HOME is not found", Error.XBMC_HOME_NOT_DEFINED) base_xbmc_path = dirname(dirname(dirname(xbmc_home))) android_binary_dir = os.path.join(base_xbmc_path, "files") if not os.path.exists(android_binary_dir): os.makedirs(android_binary_dir) android_binary_path = os.path.join(android_binary_dir, binary) if not os.path.exists(android_binary_path) or \ int(os.path.getmtime(android_binary_path)) < 
int(os.path.getmtime(binary_path)): import shutil shutil.copy2(binary_path, android_binary_path) if not self._ensure_binary_executable(android_binary_path): raise Error("Can't make %s executable" % android_binary_path, Error.NOEXEC_FILESYSTEM) binary_path = android_binary_path else: raise Error("Can't make %s executable, ensure it's placed on exec partition and " "partition is in read/write mode" % binary_path, Error.NOEXEC_FILESYSTEM) self._log("Selected %s as torrent2http binary" % binary_path) return binary_path def __init__(self, uri=None, binaries_path=None, platform=None, download_path=".", bind_host='127.0.0.1', bind_port=5001, connections_limit=None, download_kbps=None, upload_kbps=None, enable_dht=True, enable_lsd=True, enable_natpmp=True, enable_upnp=True, enable_scrape=False, log_stats=False, encryption=Encryption.ENABLED, keep_complete=False, keep_incomplete=False, keep_files=False, log_files_progress=False, log_overall_progress=False, log_pieces_progress=False, listen_port=6881, use_random_port=False, max_idle_timeout=None, no_sparse=False, resume_file=None, user_agent=None, startup_timeout=5, state_file=None, enable_utp=True, enable_tcp=True, debug_alerts=False, logger=None, torrent_connect_boost=50, connection_speed=50, peer_connect_timeout=15, request_timeout=20, min_reconnect_time=60, max_failcount=3, dht_routers=None, trackers=None): """ Creates engine instance. It doesn't do anything except initializing object members. For starting engine use start() method. 
:param uri: Torrent URI (magnet://, file:// or http://) :param binaries_path: Path to torrent2http binaries :param platform: Object with two methods implemented: arch() and system() :param download_path: Torrent download path :param bind_host: Bind host of torrent2http :param bind_port: Bind port of torrent2http :param connections_limit: Set a global limit on the number of connections opened :param download_kbps: Max download rate (kB/s) :param upload_kbps: Max upload rate (kB/s) :param enable_dht: Enable DHT (Distributed Hash Table) :param enable_lsd: Enable LSD (Local Service Discovery) :param enable_natpmp: Enable NATPMP (NAT port-mapping) :param enable_upnp: Enable UPnP (UPnP port-mapping) :param enable_scrape: Enable sending scrape request to tracker (updates total peers/seeds count) :param log_stats: Log all stats (incl. log_overall_progress, log_files_progress, log_pieces_progress) :param encryption: Encryption: 0=forced 1=enabled (default) 2=disabled :param keep_complete: Keep complete files after exiting :param keep_incomplete: Keep incomplete files after exiting :param keep_files: Keep all files after exiting (incl. 
            keep_complete and keep_incomplete)
        :param log_files_progress: Log files progress
        :param log_overall_progress: Log overall progress
        :param log_pieces_progress: Log pieces progress
        :param listen_port: Use specified port for incoming connections
        :param use_random_port: Use random listen port (49152-65535)
        :param max_idle_timeout: Automatically shutdown torrent2http if no connection are active after a timeout
        :param no_sparse: Do not use sparse file allocation
        :param resume_file: Use fast resume file
        :param user_agent: Set an user agent
        :param startup_timeout: torrent2http startup timeout
        :param state_file: Use file for saving/restoring session state
        :param enable_utp: Enable uTP protocol
        :param enable_tcp: Enable TCP protocol
        :param debug_alerts: Show debug alert notifications
        :param logger: Instance of logging.Logger
        :param torrent_connect_boost: The number of peers to try to connect to immediately when the first tracker
            response is received for a torrent
        :param connection_speed: The number of peer connection attempts that are made per second
        :param peer_connect_timeout: The number of seconds to wait after a connection attempt is initiated to a peer
        :param request_timeout: The number of seconds until the current front piece request will time out
        :param min_reconnect_time: The time to wait between peer connection attempts. If the peer fails, the time is
            multiplied by fail counter
        :param max_failcount: The maximum times we try to connect to a peer before stop connecting again
        :param dht_routers: List of additional DHT routers (host:port pairs)
        :param trackers: List of additional tracker URLs
        """
        # Peer / tracker tuning knobs
        self.dht_routers = dht_routers or []
        self.trackers = trackers or []
        self.max_failcount = max_failcount
        self.min_reconnect_time = min_reconnect_time
        self.request_timeout = request_timeout
        self.peer_connect_timeout = peer_connect_timeout
        self.connection_speed = connection_speed
        self.torrent_connect_boost = torrent_connect_boost
        self.platform = platform
        self.bind_host = bind_host
        self.bind_port = bind_port
        # Default binaries location: <three dirs up from this file>/bin
        self.binaries_path = binaries_path or os.path.join(dirname(dirname(dirname(os.path.abspath(__file__)))), 'bin')
        self.download_path = download_path
        self.connections_limit = connections_limit
        self.download_kbps = download_kbps
        self.upload_kbps = upload_kbps
        self.enable_dht = enable_dht
        self.enable_lsd = enable_lsd
        self.enable_natpmp = enable_natpmp
        self.enable_upnp = enable_upnp
        self.enable_scrape = enable_scrape
        self.log_stats = log_stats
        self.encryption = encryption
        self.keep_complete = keep_complete
        self.keep_incomplete = keep_incomplete
        self.keep_files = keep_files
        self.log_files_progress = log_files_progress
        self.log_overall_progress = log_overall_progress
        self.log_pieces_progress = log_pieces_progress
        self.listen_port = listen_port
        self.use_random_port = use_random_port
        self.max_idle_timeout = max_idle_timeout
        self.no_sparse = no_sparse
        self.resume_file = resume_file
        self.user_agent = user_agent
        self.startup_timeout = startup_timeout
        self.state_file = state_file
        self.wait_on_close_timeout = None
        self.enable_utp = enable_utp
        self.enable_tcp = enable_tcp
        self.debug_alerts = debug_alerts
        self.logger = logger
        self.uri = uri
        # Runtime state of the spawned torrent2http process
        self.logpipe = None
        self.process = None
        self.started = False

    @staticmethod
    def _validate_save_path(path):
        """
        Ensures download path can be accessed locally.

        :param path: Download path
        :return: Translated path
        :raises Error: with INVALID_DOWNLOAD_PATH if the path is an unmounted
            network share or does not exist
        """
        # Imported lazily: only available inside a Kodi/XBMC runtime
        import xbmc
        path = xbmc.translatePath(path)
        if "://" in path:
            # On Windows an smb:// URL can be rewritten to a UNC path;
            # any other remote scheme cannot be written to directly.
            if sys.platform.startswith('win') and path.lower().startswith("smb://"):
                path = path.replace("smb:", "").replace("/", "\\")
            else:
                raise Error("Downloading to an unmounted network share is not supported",
                            Error.INVALID_DOWNLOAD_PATH)
        if not os.path.isdir(ensure_fs_encoding(path)):
            raise Error("Download path doesn't exist (%s)" % path, Error.INVALID_DOWNLOAD_PATH)
        return path

    def start(self, start_index=None):
        """
        Starts torrent2http client with specified settings. If it can't be started
        within startup_timeout seconds, an Error is raised.

        :param start_index: File index to start download instantly, if not specified,
            downloading will be paused, until any file requested
        :raises Error: BIND_ERROR, POPEN_ERROR, PROCESS_ERROR or TIMEOUT
        """
        self.platform = self.platform or Platform()
        binary_path = self._get_binary_path(self.binaries_path)
        download_path = self._validate_save_path(self.download_path)
        # Fall back to a free port when the configured one is taken
        if not can_bind(self.bind_host, self.bind_port):
            port = find_free_port(self.bind_host)
            if port is False:
                raise Error("Can't find port to bind torrent2http", Error.BIND_ERROR)
            self._log("Can't bind to %s:%s, so we found another port: %d" %
                      (self.bind_host, self.bind_port, port))
            self.bind_port = port
        # Command-line flags for the torrent2http binary; None values are skipped below
        kwargs = {
            '--bind': "%s:%s" % (self.bind_host, self.bind_port),
            '--uri': self.uri,
            '--file-index': start_index,
            '--dl-path': download_path,
            '--connections-limit': self.connections_limit,
            '--dl-rate': self.download_kbps,
            '--ul-rate': self.upload_kbps,
            '--enable-dht': self.enable_dht,
            '--enable-lsd': self.enable_lsd,
            '--enable-natpmp': self.enable_natpmp,
            '--enable-upnp': self.enable_upnp,
            '--enable-scrape': self.enable_scrape,
            '--encryption': self.encryption,
            '--show-stats': self.log_stats,
            '--files-progress': self.log_files_progress,
            '--overall-progress': self.log_overall_progress,
            '--pieces-progress': self.log_pieces_progress,
            '--listen-port': self.listen_port,
            '--random-port': self.use_random_port,
            '--keep-complete': self.keep_complete,
            '--keep-incomplete': self.keep_incomplete,
            '--keep-files': self.keep_files,
            '--max-idle': self.max_idle_timeout,
            '--no-sparse': self.no_sparse,
            '--resume-file': self.resume_file,
            '--user-agent': self.user_agent,
            '--state-file': self.state_file,
            '--enable-utp': self.enable_utp,
            '--enable-tcp': self.enable_tcp,
            '--debug-alerts': self.debug_alerts,
            '--torrent-connect-boost': self.torrent_connect_boost,
            '--connection-speed': self.connection_speed,
            '--peer-connect-timeout': self.peer_connect_timeout,
            '--request-timeout': self.request_timeout,
            '--min-reconnect-time': self.min_reconnect_time,
            '--max-failcount': self.max_failcount,
            '--dht-routers': ",".join(self.dht_routers),
            '--trackers': ",".join(self.trackers),
        }
        args = [binary_path]
        # Render flags as --flag=value; booleans become explicit true/false
        for k, v in kwargs.iteritems():
            if v is not None:
                if isinstance(v, bool):
                    if v:
                        #args.append(k)
                        args.append("%s=true" % k)
                    else:
                        args.append("%s=false" % k)
                else:
                    #args.append(k)
                    if isinstance(v, str) or isinstance(v, unicode):
                        v = ensure_fs_encoding(v)
                    else:
                        v = str(v)
                    #args.append(v)
                    args.append("%s=%s" % (k, v))
        self._log("Invoking %s" % " ".join(args))
        startupinfo = None
        if self.platform.system == "windows":
            # Hide the console window of the child process on Windows
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= 1
            startupinfo.wShowWindow = 0
        # Child's stdout/stderr are funneled into our logger through a pipe
        self.logpipe = logpipe.LogPipe(self._log)
        try:
            self.process = subprocess.Popen(args, stderr=self.logpipe, stdout=self.logpipe,
                                            startupinfo=startupinfo)
        except OSError, e:
            raise Error("Can't start torrent2http: %r" % e, Error.POPEN_ERROR)
        # Poll until the HTTP status endpoint answers or the timeout elapses
        start = time.time()
        self.started = True
        initialized = False
        while (time.time() - start) < self.startup_timeout:
            time.sleep(0.1)
            if not self.is_alive():
                raise Error("Can't start torrent2http, see log for details", Error.PROCESS_ERROR)
            try:
                self.status(1)
                initialized = True
                break
            except Error:
                pass
        if not initialized:
            self.started = False
            raise Error("Can't start torrent2http, time is out", Error.TIMEOUT)
        self._log("torrent2http successfully started.")
def syntesizePropertyName(value, position):
    """Check a synthesized property name; return an Error if it is underscored, else None."""
    leading = value[0]
    if leading == "_":
        return Error('BadSyntesizePropertyName',
                     'Property names must not be underscored', position, LINES)
    return None
def _decode(response):
    """Parse a JSON response body from torrent2http, wrapping parse failures in Error."""
    try:
        payload = simplejson.loads(response)
    except (KeyError, ValueError) as e:
        raise Error("Can't decode response from torrent2http: %r" % e, Error.REQUEST_ERROR)
    return payload
def inv(A):
    """Return the inverse of matrix A, computed by solving A X = I.

    Raises Error when the determinant is zero. The step-by-step trace of the
    solve is annotated with the final "A^{-1} = ..." line.
    """
    if deter(A.M) == 0:
        raise Error("Matrix is NOT INVERTIBLE")
    inverse = solve(A, identity(len(A)))
    inverse.steps[-2] = "%s^{-1} = %s" % (A, inverse)
    return inverse
def run(self):
    """Parse the PATCH_XML configuration, then report any accumulated errors."""
    xml_parser = AutoPatchXML()
    xml_parser.parse()
    Error.report()
def analisis():
    """Lexically analyse the menu text in the global `lineasMenu`.

    Character-by-character finite-state machine. Recognised tokens are
    appended to the global `tokens` list, lexical errors to `errores`,
    parsed dishes to `platillos`. When no errors were found, the HTML menu
    and token report are generated; otherwise the error report is generated.
    Interactively asks (via input()) whether to set a price limit.
    """
    global lineasMenu, nombre, errores,tokens,platillos,reservada,nombre,categorias,menu,identificadores,limite
    numero_token=0
    numero_error=0
    fila=1          # current line (1-based)
    estado=0        # FSM state
    patron_identificador = r"[restaunRESTAUN]"  # letters that may start the reserved word
    current_cat=""
    current_id=""
    current_name=""
    current_precio=""
    current_descripcion=""
    estado=0
    lexema=""
    columna=1       # current column
    print("Desea establecer un limite de precios?\n1.Si\n2.No")
    respuesta=input()
    if respuesta==str(1):
        limite=input("Ingrese el valor:")
    for caracter in lineasMenu:
        columna +=1
        if caracter =="\n":
            # newline: reset column and state
            fila+=1
            columna=0
            estado=0
        elif estado==0:
            # state 0: between tokens, decide what starts here
            lexema=""
            if caracter==" ":
                continue
            elif caracter=="":
                continue
            elif re.search(patron_identificador, caracter):
                lexema += caracter
                estado=1
            elif caracter=="\'":
                lexema=caracter
                numero_token +=1
                tokens.append(Token(numero_token,lexema,fila,columna,"TK_comilla_simple"))
                lexema=""
                estado=6
            elif caracter=="[":
                lexema=caracter
                numero_token+=1
                tokens.append(Token(numero_token,lexema,fila,columna,"TK_corchete"))
                lexema=""
                estado=8
            else:
                error=caracter
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,error,"Caracter no valido"))
                estado=0
                continue
        elif estado==1:
            # state 1: reading a word that may be the reserved word "restaurante"
            # NOTE(review): the class [a-zA-z\s] also matches [\]^_` (A-z span) — confirm intent
            if re.search(r"[a-zA-z\s]",caracter):
                lexema+=caracter
                continue
            elif caracter=="=":
                if reservada =="":
                    if lexema.strip().lower()=="restaurante":
                        reservada=lexema.strip()
                        lexema=caracter
                        numero_token += 1
                        tokens.append(Token(numero_token,reservada,fila,columna-1,"Palabra Reservada"))
                    else:
                        numero_error+=1
                        errores.append(Error(numero_error,fila,columna-1,lexema.strip(),"Palabra reservada incorrecta"))
                        numero_token  # NOTE(review): bare expression, token number not incremented — confirm
                        tokens.append(Token(numero_token,lexema.strip(),fila,columna-1,"TK_cadena"))
                else:
                    numero_error +=1
                    errores.append(Error(numero_error,fila,columna-1,lexema.strip(),"Ya existe la palabra reservada"))
                    numero_token  # NOTE(review): bare expression, token number not incremented — confirm
                    tokens.append(Token(numero_token,lexema.strip(),fila,columna-1,"TK_cadena"))
                numero_token +=1
                tokens.append(Token(numero_token,caracter,fila,columna,"Tk_igual"))
                lexema=""
                estado=2
            else:
                error=caracter
                numero_error +=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter no valido"))
                break
        elif estado==2:
            # state 2: expecting the opening quote of the restaurant name
            if caracter==" ":
                continue
            elif caracter=="'":
                lexema=caracter
                numero_token+=1
                tokens.append(Token(numero_token,lexema,fila,columna,"TK_comilla_simple"))
                lexema=""
                estado=3
            else:
                error=caracter
                numero_error +=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter no valido, se esperaba ' "))
                continue
        elif estado==3:
            # state 3: reading the restaurant name until the closing quote
            if re.search(r"[^']",caracter):
                lexema += caracter
                continue
            elif caracter=="'":
                nombre=lexema.strip()
                numero_token+=1
                tokens.append(Token(numero_token,nombre,fila,columna-1,"TK_cadena"))
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"Tk_comilla_simple"))
                lexema=""
                estado=0
            else:
                numero_error+=1
                error=caracter
                errores.append(Error(numero_error,fila,columna,error,"Se esperaba: ' "))
                continue
        elif estado==6:
            # state 6: reading a category name inside quotes
            if re.search(r"[A-Za-z\s]",caracter):
                lexema +=caracter
                continue
            elif caracter=="'":
                current_cat=lexema.strip()
                categorias.append(lexema.strip())
                numero_token+=1
                tokens.append(Token(numero_token,lexema.strip(),fila,columna-1,"TK_categoria"))
                lexema=""
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_comilla_simple"))
                estado=7
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter no valido"))
        elif estado==7:
            # state 7: expecting ':' after a category
            if caracter==" ":
                continue
            elif caracter==":":
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_dos_puntos"))
                estado=0
                continue
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter invalido"))
        elif estado==8:
            # state 8: after '[', expecting the first (lowercase) letter of an identifier
            if caracter==" ":
                continue
            elif re.search(r"[a-z]",caracter):
                lexema +=caracter
                estado=9
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"El identificador debe iniciar en una letra minuscula"))
        elif estado==9:
            # state 9: rest of the identifier, terminated by ';'
            if caracter==" ":
                continue
            elif re.search(r"[a-z0-9_]",caracter):
                lexema+=caracter
                continue
            elif caracter==";":
                numero_token+=1
                current_id=lexema.strip()
                # NOTE(review): duplicate check appends the id once per non-matching
                # element and never when the list is empty — confirm intent
                for element in identificadores:
                    if current_id==element:
                        numero_error+=1
                        errores.append(Error(numero_error,fila,columna,current_id,"Ya existe un producto con ese identificador"))
                    else:
                        identificadores.append(current_id)
                tokens.append(Token(numero_token,current_id,fila,columna-1,"TK_identificador"))
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_punto_coma"))
                lexema=""
                estado=10
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter no valido en el identificador"))
                continue
        elif estado==10:
            # state 10: expecting the opening quote of the dish name
            if caracter==" ":
                continue
            elif caracter=="'":
                numero_token+=1
                tokens.append(Token(numero_token,lexema,fila,columna,"TK_comilla_simple"))
                estado=11
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Se esperaba: ' "))
                continue
        elif estado==11:
            # state 11: reading the dish name until the closing quote
            if re.search(r"[^']",caracter):
                lexema+=caracter
                continue
            elif caracter=="'":
                numero_token+=1
                current_name=lexema.strip()
                tokens.append(Token(numero_token,current_name,fila,columna-1,"TK_cadena"))
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_comilla_simple"))
                lexema=""
                estado=12
        elif estado==12:
            # state 12: expecting ';' after the dish name
            if caracter==" ":
                continue
            elif caracter==";":
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_punto_coma"))
                estado=13
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Se esperaba: ;"))
        elif estado==13:
            # state 13: expecting the first digit of the price
            if caracter==" ":
                continue
            elif re.search(r"[0-9]",caracter):
                lexema +=caracter
                estado=14
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Se esperaba un numero: 0-9"))
        elif estado==14:
            # state 14: integer part of the price; '.' starts the decimal part
            if re.search(r"[0-9]",caracter):
                lexema +=caracter
            elif caracter==".":
                lexema +=caracter
                estado=15
            elif caracter==" ":
                numero_token+=1
                current_precio="{0:.2f}".format(float(lexema))
                tokens.append(Token(numero_token,current_precio,fila,columna-1,"TK_numero"))
                lexema=""
                estado=16
                continue
            elif caracter==r";":
                numero_token+=1
                current_precio="{0:.2f}".format(float(lexema))
                tokens.append(Token(numero_token,current_precio,fila,columna-1,"TK_numero"))
                lexema=""
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_punto_coma"))
                estado=17
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter no valido"))
        elif estado==15:
            # state 15: decimal part of the price
            if re.search(r"[0-9]",caracter):
                lexema+=caracter
                continue
            elif caracter==" ":
                numero_token+=1
                current_precio="{0:.2f}".format(float(lexema))
                tokens.append(Token(numero_token,current_precio,fila,columna-1,"TK_numero"))
                lexema=""
                estado=16
                continue
            elif caracter==r";":
                numero_token+=1
                current_precio="{0:.2f}".format(float(lexema))
                tokens.append(Token(numero_token,current_precio,fila,columna-1,"TK_numero"))
                lexema=""
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_punto_coma"))
                estado=17
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter no valido, se esperaba un valor [0-9]"))
        elif estado==16:
            # state 16: expecting ';' after the price
            if caracter==" ":
                continue
            elif caracter==";":
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_punto_coma"))
                estado=17
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter no valido, se esperaba: ;"))
        elif estado ==17:
            # state 17: expecting the opening quote of the description
            if caracter==" ":
                continue
            elif caracter=="'":
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_comilla_simple"))
                estado=18
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Caracter no valido, se esperaba: '"))
        elif estado==18:
            # state 18: reading the description; a closing quote completes a dish
            if re.search(r"[^']",caracter):
                lexema+=caracter
                continue
            elif caracter=="'":
                numero_token+=1
                current_descripcion=lexema.strip()
                tokens.append(Token(numero_token,current_descripcion,fila,columna,"TK_cadena"))
                lexema=""
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_comilla_simple"))
                platillos.append(Platillo(current_cat,current_id,current_name,current_precio,current_descripcion))
                estado=19
            else:
                numero_error+=1
                errores.append(Error(numero_error,fila,columna,caracter,"Se esperaba: '"))
                continue
        elif estado==19:
            # state 19: expecting ']' closing the dish entry (other characters are ignored)
            if caracter==" ":
                continue
            elif caracter=="]":
                numero_token+=1
                tokens.append(Token(numero_token,caracter,fila,columna,"TK_corchete"))
                estado=0
                continue
    # Emit reports: menu + tokens on success, errors + tokens otherwise
    if errores==[]:
        menu=Menu(nombre,categorias,platillos)
        generarMenuHTML(menu)
        generarTokensHTML(tokens)
    else:
        generarErroresHTML(errores)
        generarTokensHTML(tokens)
def sizedCType(value, position):
    """A type modifier: reject runs of two or more spaces in a type name."""
    match = re.search(r'\s\s+', value)
    if match is not None:
        return Error('ExtraSpace', 'Extra space in type name',
                     position + match.start(), LINES)
    return None
def statement(self, indice):
    """Parse one <statement> production starting at token index `indice`.

    Returns the index of the first token after the statement. Syntax problems
    are reported through the Error.* helpers and the current index is returned
    so the caller can attempt to continue.
    """
    if not self.existeToken(indice):
        Error.NaoFoiPossivelLerMaisToken(self.infoTokens[indice],"statement")
        return indice
    if self.tokens[indice] == TipoToken.SepAbreChave.name: # block
        indice = self.block(indice)
        return indice
    # Disabled alternative for labelled statements — kept for reference
    '''if self.tokens[indice] == TipoToken.Variavel.name: # <identifier>
        if not self.tokens[indice] == TipoToken.SepDoisPontos.name:
            Error.RecebeuTokenInesperado(self.infoTokens[indice],TipoToken.SepDoisPontos.name, self.tokens[indice])
            return indice
        indice += 1
        return self.statement(indice)'''
    if self.tokens[indice] == TipoToken.PCIf.name: # if
        indice += 1
        indice = self.parExpression(indice)
        indice += 1
        indice = self.statement(indice)
        if not self.existeToken(indice):
            return indice
        # Optional else branch
        if self.tokens[indice] == TipoToken.PCElse.name:
            indice += 1
            return self.statement(indice)
        return indice
    if self.tokens[indice] == TipoToken.PCWhile.name: # while
        indice += 1
        indice = self.parExpression(indice)
        indice += 1
        indice = self.statement(indice)
        return indice
    if self.tokens[indice] == TipoToken.PCReturn.name: # return
        indice += 1
        if not self.existeToken(indice):
            Error.EsperaTokenFimArquivo(self.infoTokens[indice],TipoToken.SepPontoEVirgula.name)
            return indice
        # Optional return value before ';'
        if self.tokens[indice] != TipoToken.SepPontoEVirgula.name:
            indice = self.expression(indice)
            if not self.existeToken(indice):
                Error.EsperaTokenFimArquivo(self.infoTokens[indice],TipoToken.SepPontoEVirgula.name)
                return indice
        if self.tokens[indice] != TipoToken.SepPontoEVirgula.name:
            Error.RecebeuTokenInesperado(self.infoTokens[indice],TipoToken.SepPontoEVirgula.name, self.tokens[indice])
            return indice
        indice += 1
        return indice
    # Empty statement: lone ';'
    if self.tokens[indice] == TipoToken.SepPontoEVirgula.name:
        indice += 1
        return indice
    # Fallback: an expression statement terminated by ';'
    indice = self.statementExpression(indice)
    if not self.existeToken(indice):
        Error.NaoFoiPossivelLerMaisToken(self.infoTokens[indice],TipoToken.SepPontoEVirgula.name)
        return indice
    if self.tokens[indice] != TipoToken.SepPontoEVirgula.name: # ;
        Error.RecebeuTokenInesperado(self.infoTokens[indice],TipoToken.SepPontoEVirgula.name, self.tokens[indice])
        return indice
    indice += 1
    return indice
class CPU:
    """Simple CPU emulator: registers A/X, a PC, a mode string, and a fixed
    instruction set executed against an attached Memory/IO pair."""

    def __init__(self):
        self.memory = None
        self.error = None
        self.internal_state = None
        self.io = None
        self.clock = None
        self.base_instructions = None

    # initialises the cpu with everything it needs
    def initialize(self, base_instructions):
        self.memory = Memory(20)
        self.error = Error()
        self.internal_state = Internal_State()
        self.io = IO()
        self.base_instructions = base_instructions

    # replaces the cpu's current memory
    def change_memory(self, memory):
        self.memory = memory

    # replaces the cpu's error object
    def change_error(self, error):
        self.error = error

    # replaces the cpu's internal state
    def change_internal_state(self, internal_state):
        self.internal_state = internal_state

    # replaces the cpu's IO object
    def change_IO(self, io):
        self.io = io

    # switches to a new task (new memory, internal state, io)
    def change_task(self, task):
        self.memory = task.memory
        self.internal_state = task.cpu_state
        self.io = task.io

    def print_internal_state(self, clock):
        """Print and return a one-line dump of PC, A, X, mode and clock count."""
        s = "PC: " + str(self.internal_state.PC) + " - Register A: " + str(
            self.internal_state.A) + " - Register X: " + str(
            self.internal_state.X) + " - CPU Mode: " + str(
            self.internal_state.mode) + " - Clock: " + str(clock.count)
        print(s)
        return s

    def read_instruction(self, base_instructions):
        """Fetch the opcode at PC, dispatch it, and advance PC past its arguments."""
        current_instruction_code = self.memory.read(self.internal_state.PC, self.error)
        print(f"\n--- RUNNING INSTRUCTION {current_instruction_code} ---")
        for instruction in base_instructions:
            if (instruction.code == current_instruction_code):
                # I/O instructions (19-21): switch to privileged mode and record
                # which instruction caused the switch in mode_info
                if (instruction.code == 19 or instruction.code == 20
                        or instruction.code == 21):
                    self.internal_state.mode = "PRIVILEGIADA"
                    self.internal_state.mode_info = instruction.code
                    return
                # halt instruction
                if (instruction.code == 1):
                    self.internal_state.mode = "PARADA"
                    return
                self.execute_instruction(instruction.code)
                # advance past the opcode and its arguments
                self.internal_state.PC = self.internal_state.PC + 1 + instruction.arguments
                return
        # Unknown opcode: record error 4 and halt
        self.error.update(4)
        self.internal_state.mode = "PARADA"
        self.internal_state.mode_info = "RECEIVED INSTRUCTION: " + \
            str(current_instruction_code)

    def execute_instruction(self, code):
        """Execute one non-I/O, non-halt opcode against registers and memory."""
        # 2 - CARGI - 1Arg - A = A1
        if (code == 2):
            self.internal_state.A = self.memory.info[self.internal_state.PC + 1]
        # 3 - CARGM - 1Arg - A = mem[A1]
        if (code == 3):
            self.internal_state.A = self.memory.read(
                self.memory.info[self.internal_state.PC + 1], self.error)
        # 4 - CARGX - 1 - A = mem[A1+X]
        if (code == 4):
            self.internal_state.A = self.memory.read(
                self.memory.info[self.internal_state.PC + 1 + self.internal_state.X],
                self.error)
        # 5 - ARMM - 1Arg - mem[A1] = A
        if (code == 5):
            self.memory.write(self.memory.info[self.internal_state.PC + 1],
                              self.internal_state.A, self.error)
        # 6 - ARMX - 1Arg - mem[A1+X] = A
        if (code == 6):
            self.memory.write(
                self.memory.info[self.internal_state.PC + 1 + self.internal_state.X],
                self.internal_state.A, self.error)
        # 7 - MVAX - 0Arg - X = A
        if (code == 7):
            self.internal_state.X = self.internal_state.A
        # 8 - MVXA - 0Arg - A = X
        if (code == 8):
            self.internal_state.A = self.internal_state.X
        # 9 - INCX - 0 - 0Arg - X++
        if (code == 9):
            self.internal_state.X = self.internal_state.X + 1
        # 10 - SOMA - 1Arg - A+=mem[A1]
        if (code == 10):
            self.internal_state.A = self.internal_state.A + \
                self.memory.read(
                    self.memory.info[self.internal_state.PC + 1], self.error)
        # 11 - SUB - 1Arg - A-=mem[A1]
        if (code == 11):
            self.internal_state.A = self.internal_state.A - \
                self.memory.read(
                    self.memory.info[self.internal_state.PC + 1], self.error)
        # 12 - MULT - 1Arg - A*=mem[A1]
        if (code == 12):
            self.internal_state.A = self.internal_state.A * \
                self.memory.read(
                    self.memory.info[self.internal_state.PC + 1], self.error)
        # 13 - DIV - 1Arg - A/=mem[A1]
        if (code == 13):
            self.internal_state.A = self.internal_state.A / \
                self.memory.read(
                    self.memory.info[self.internal_state.PC + 1], self.error)
        # 14 - RESTO - 1Arg - A%=mem[A1]
        if (code == 14):
            self.internal_state.A = self.internal_state.A % self.memory.read(
                self.memory.info[self.internal_state.PC + 1], self.error)
        # 15 - NEG - 0Arg - A = -A
        if (code == 15):
            self.internal_state.A = -1 * (self.internal_state.A)
        # 16 - DESV - 0Arg - PC = A1
        # (the -2 compensates the PC advance done by read_instruction)
        if (code == 16):
            self.internal_state.PC = self.memory.read(
                self.internal_state.PC + 1, self.error)
            self.internal_state.PC = self.internal_state.PC - 2
        # 17 - DESVZ - 1Arg - if A is 0, PC = A1
        if (code == 17):
            if (self.internal_state.A == 0):
                self.internal_state.PC = self.memory.read(
                    self.internal_state.PC + 1, self.error)
                self.internal_state.PC = self.internal_state.PC - 2
        # 18 - DESVNZ - 1Arg - if A is not 0, PC = A1
        if (code == 18):
            if (self.internal_state.A != 0):
                self.internal_state.PC = self.memory.read(
                    self.internal_state.PC + 1, self.error)
                self.internal_state.PC = self.internal_state.PC - 2

    def cpu_execute(self, clock):
        """One emulation step: dump state, then fetch/execute one instruction."""
        self.print_internal_state(clock)
        self.read_instruction(self.base_instructions)
def simpleUnaryExpression(self, indice):
    """Parse a <simpleUnaryExpression> starting at token index `indice`.

    Handles '!'-prefixed expressions, cast expressions '(' type ')' and the
    postfix-expression fallback. Returns the index after the parsed
    expression; syntax problems are reported through the Error.* helpers.

    Fix: the close-paren check in the reference-type cast branch compared the
    token string against the enum member `TipoToken.SepFechaParentese` itself
    instead of its `.name` (as every sibling check does), so it was always
    unequal and reported a spurious error for every valid cast.
    """
    if not self.existeToken(indice):
        Error.EsperaTokenFimArquivo(self.infoTokens[indice],TipoToken.OPNao.name)
        return indice
    if self.tokens[indice] == TipoToken.OPNao.name:
        # '!' <unaryExpression>
        indice += 1
        return self.unaryExpression(indice)
    if self.tokens[indice] == TipoToken.SepAbreParentese.name:
        indice += 1
        if not self.existeToken(indice):
            Error.EsperaTokenFimArquivo(self.infoTokens[indice],"simpleUnaryExpression")
            return indice
        if self.ehUmBasicType(indice) :
            # '(' basicType [array brackets] ')' cast
            aux = self.basicType(indice)
            if not self.existeToken(indice):
                Error.EsperaTokenFimArquivo(self.infoTokens[indice],TipoToken.SepFechaParentese.name)
                return indice
            if self.tokens[indice] == TipoToken.SepAbreColchete.name:
                indice = self.referenceType(indice)
                if not self.existeToken(indice):
                    Error.EsperaTokenFimArquivo(self.infoTokens[indice],TipoToken.SepFechaParentese.name)
                    return indice
                if self.tokens[indice] != TipoToken.SepFechaParentese.name :
                    Error.RecebeuTokenInesperado(self.infoTokens[indice],TipoToken.SepFechaParentese.name, self.tokens[indice])
                indice += 1
                return self.simpleUnaryExpression(indice)
            else:
                indice = aux
                if not self.existeToken(indice):
                    Error.EsperaTokenFimArquivo(self.infoTokens[indice],TipoToken.SepFechaParentese.name)
                    return indice
                if(self.tokens[indice] != TipoToken.SepFechaParentese.name):
                    Error.RecebeuTokenInesperado(self.infoTokens[indice],TipoToken.SepFechaParentese.name, self.tokens[indice])
                indice += 1
                return self.unaryExpression(indice)
        elif self.ehUmReferenceType(indice):
            # '(' referenceType ')' cast
            indice = self.referenceType(indice)
            if not self.existeToken(indice):
                Error.EsperaTokenFimArquivo(self.infoTokens[indice],TipoToken.SepFechaParentese.name)
                return indice
            # FIX: compare against the enum member's .name (was the member itself)
            if self.tokens[indice] != TipoToken.SepFechaParentese.name :
                Error.RecebeuTokenInesperado(self.infoTokens[indice],TipoToken.SepFechaParentese.name, self.tokens[indice])
            indice += 1
            return self.simpleUnaryExpression(indice)
        else:
            indice += 1
            return self.postfixExpression(indice)
    indice = self.postfixExpression(indice)
    return indice
def parameterName(value, position):
    """A name of a parameter: must start with a lower-case letter."""
    first_char = value[0]
    if first_char.islower():
        return None
    return Error('BadParameterName', 'Parameter names must not be capitalized',
                 position, LINES)
def weight():
    """POST: bulk-insert weight readings; GET: query readings for one bin
    within a timestamp window.

    Returns Flask (body, status) responses; known failures raise Error whose
    message keys are mapped through Error.em.
    NOTE(review): Error.em presumably maps those keys to (body, status)
    responses — confirm against the Error class definition.
    """
    try:
        if request.method == "POST":
            # Check if JSON request
            if not request.json:
                raise Error("JSON_FORMAT")
            post_data = request.json["data"]
            for row in post_data:
                # bin_id, datetime, and weight must be present in body
                if (("bin_id" not in row) or ("datetime" not in row)
                        or ("weight" not in row)):
                    raise Error("MISSING_KEY")
                # bin_id, datetime, or weight cannot be null
                if ((row["bin_id"] is None) or ("datetime" in row and row["datetime"] is None)
                        or (row["weight"] is None)) if False else ((row["bin_id"] is None) or (row["datetime"] is None) or (row["weight"] is None)):
                    raise Error("NULL_VALUE")
                # Check if bin_id and return 400 if invalid bin_id
                thebin = models.BinInfo.query.get(row["bin_id"])
                if thebin is None:
                    raise Error("INVALID_BIN_ID")
                # Add weight data to db
                addToDatabase(
                    models.BinWeight(
                        datetimestamp=row["datetime"],
                        bin_weight=row["weight"],
                        bin=thebin,
                    ))
            return "Posted: " + str(request.json), 201
        elif request.method == "GET":
            # Get query parameters
            bin_id = request.args.get("bin_id")
            start_timestamp = request.args.get("start_timestamp")
            end_timestamp = request.args.get("end_timestamp")
            # None of the parameters can be null
            if bin_id is None or start_timestamp is None or end_timestamp is None:
                raise Error("NULL_VALUE")
            else:
                # Get entries with same bin_id and between start_timestamp and end_timestamp
                weight_data = models.BinWeight.query.filter(
                    and_(
                        models.BinWeight.bin_id == bin_id,
                        between(
                            models.BinWeight.datetimestamp,
                            start_timestamp,
                            end_timestamp,
                        ),
                    )).all()
                ret = {"data": []}
                for row in weight_data:
                    keys = ["timestamp", "bin_weight", "bin_id"]
                    vals = [str(row.datetimestamp), row.bin_weight, row.bin_id]
                    ret["data"].append(dict(zip(keys, vals)))
                return jsonify(ret), 200
    except Error as e:
        return Error.em[str(e)]
    except Exception as e:
        return str(e), 400
def className(value, position):
    """A name of a class: must start with an upper-case letter."""
    if value[0].isupper():
        return None
    return Error('BadClassName', 'Class names must be capitalized', position, LINES)
def fullness_info():
    """GET: fullness readings for one bin within a timestamp window;
    POST: bulk-insert fullness readings.

    Returns Flask (body, status) responses; known failures raise Error whose
    message keys are mapped through Error.em.

    Fixes: `== None` comparisons replaced with the idiomatic `is None`
    (PEP 8); the copy-pasted "weight" null-check comment corrected to
    "fullness".
    """
    try:
        if request.method == "GET":
            bin_id = request.args.get("bin_id")
            start_timestamp = request.args.get("start_timestamp")
            end_timestamp = request.args.get("end_timestamp")
            # throw error if any required parameters are None
            if bin_id is None or start_timestamp is None or end_timestamp is None:
                raise Error("NULL_VALUE")
            # throw error if start_timestamp > end_timestamp
            # NOTE(review): this is a string comparison; assumes ISO-8601-style
            # timestamps where lexicographic order matches chronological — confirm
            if start_timestamp > end_timestamp:
                raise Error("TIMESTAMP_ISSUE")
            bins = models.BinFullness.query.filter(
                and_(
                    models.BinFullness.bin_id == bin_id,
                    start_timestamp <= models.BinFullness.datetimestamp,
                    end_timestamp >= models.BinFullness.datetimestamp,
                )).all()
            ret = {"data": []}
            for the_bin in bins:
                keys = ["id", "fullness", "bin_id", "datetimestamp"]
                vals = [
                    the_bin.id,
                    the_bin.fullness,
                    the_bin.bin_id,
                    str(the_bin.datetimestamp),
                ]
                ret["data"].append(dict(zip(keys, vals)))
            return jsonify(ret), 200
        elif request.method == "POST":
            # Process request data
            if not request.json:
                raise Error("JSON_FORMAT")
            post_data = request.json["data"]
            for row in post_data:
                # bin_id, datetime, and fullness must be present in body
                if (("bin_id" not in row) or ("datetime" not in row)
                        or ("fullness" not in row)):
                    raise Error("MISSING_KEY")
                # bin_id, datetime, or fullness cannot be null
                if ((row["bin_id"] is None) or (row["datetime"] is None)
                        or (row["fullness"] is None)):
                    raise Error("NULL_VALUE")
                # Check bin_id and return 400 if invalid bin_id
                thebin = models.BinInfo.query.get(row["bin_id"])
                if thebin is None:
                    raise Error("INVALID_BIN_ID")
                # Add fullness to db
                addToDatabase(
                    models.BinFullness(
                        datetimestamp=row["datetime"],
                        fullness=row["fullness"],
                        bin=thebin,
                    ))
            return "Posted: " + str(request.json), 201
    except Error as e:
        return Error.em[str(e)]
    except Exception as e:
        return str(e), 400
def bin_info():
    """POST: register new bins; GET: look up one bin's metadata by bin_id.

    Returns Flask (body, status) responses; known failures raise Error whose
    message keys are mapped through Error.em.
    """
    try:
        if request.method == "POST":
            # Check if JSON request
            if not request.json:
                raise Error("JSON_FORMAT")
            # Process request data
            post_data = request.json["data"]
            for row in post_data:
                # bin_id, bin_height, location, bin_type, and waste_metrics must be present in body
                if (("ip_address" not in row) or ("bin_height" not in row)
                        or ("location" not in row) or ("bin_type" not in row)
                        or ("waste_metrics" not in row)):
                    raise Error("MISSING_KEY")
                # bin_id, bin_height, location, bin_type, and waste_metrics cannot be null
                if ((row["ip_address"] is None) or (row["bin_height"] is None)
                        or (row["location"] is None) or (row["bin_type"] is None)
                        or (row["waste_metrics"] is None)):
                    raise Error("NULL_VALUE")
                addToDatabase(
                    models.BinInfo(
                        ip_address=row["ip_address"],
                        bin_height=row["bin_height"],
                        location=row["location"],
                        bin_type=row["bin_type"],
                        waste_metrics=row["waste_metrics"],
                    ))
            return "Posted: " + str(request.json), 201
        elif request.method == "GET":
            bin_id = request.args.get(
                "bin_id")  # make sure you have the bin id
            if bin_id is None:  # checking is bin exists
                raise Error("NULL_VALUE")
            else:
                # Get entries with same bin_id and between start_timestamp and end_timestamp
                bin_data = models.BinInfo.query.filter(
                    models.BinInfo.id == bin_id).all()
                # returns the bin data with matching bin id
                if (
                        len(bin_data) > 0
                ):  # check if the bin id surpasses the amount of bins in database
                    b = bin_data[0]
                    keys = [
                        "id",
                        "ip_address",
                        "bin_height",
                        "location",
                        "bin_type",
                        "waste_metrics",
                    ]  # keys
                    vals = [
                        b.id,
                        b.ip_address,
                        b.bin_height,
                        b.location,
                        b.bin_type,
                        b.waste_metrics,
                    ]  # values
                    return (
                        jsonify(dict(zip(keys, vals))),
                        200,
                    )  # return the json of the keys and values :)
                else:
                    return (
                        jsonify({"message": "No Bin Found"}),
                        404,
                    )  # return error message is there's an error
    except Error as e:
        return Error.em[str(e)]
    except Exception as e:
        return str(e), 400
def namespaceName(value, position):
    """A name of a namespace: must start with a lower-case letter."""
    first = value[0]
    if first.islower():
        return None
    return Error('BadNamespaceName',
                 'Namespace name must start with a lower case letter',
                 position, LINES)