def move(self, destination):
    """Move the file to *destination*, replacing anything already there.

    Closes the file first if it is open and reopens it afterwards so the
    handle points at the new location.
    """
    destination = os.path.normpath(destination)
    if utils.DEBUG_LEVEL >= 1:
        utils.log_message("DEBUG", "Moving file to path " + str(destination))
    # Remember open state so the handle can be restored after the move.
    reopen = False
    if self.is_open():
        reopen = True
        self.close()
    # Clear whatever currently occupies the destination path.
    if os.path.exists(destination):
        if os.path.isdir(destination):
            os.rmdir(destination)
        else:
            os.remove(destination)
    # Ensure the parent directory exists. exist_ok only suppresses the
    # already-exists case, unlike the previous blanket try/except which
    # also hid real failures (e.g. permission errors).
    parent = os.path.split(destination)[0]
    if parent:
        os.makedirs(parent, exist_ok=True)
    shutil.move(self.get_path(), destination)
    self.path = destination
    if reopen:
        self.file = open(destination)
def send(self, socket):
    """
    Serializes this packet and writes it over the socket.

    @param socket: The socket to send the packet to.
    @type socket: socket.socket
    """
    info = self.file_info
    header = bytearray()
    header.extend(byte_utils.char_to_bytes(len(info.path)))
    header.extend(byte_utils.boolean_to_bytes(info.is_directory))
    header.extend(byte_utils.unsigned_int_to_bytes(info.last_modified))
    # Regular files also carry their size in the fixed-length header.
    if not info.is_directory:
        header.extend(byte_utils.unsigned_int_to_bytes(info.size))
    header.extend(byte_utils.string_to_bytes(info.path))
    socket.sendall(header)
    # Stream the file contents chunk by chunk (directories carry none).
    if not info.is_directory:
        for chunk in info.file_wrapper.chunks(self.CHUNK_SIZE):
            if utils.DEBUG_LEVEL >= 3:
                utils.log_message("DEBUG", "Chunk size: " + str(len(chunk)))
            socket.sendall(chunk)
def create_dirs(directory):
    """Create the working directory plus the global data/log/repo dirs.

    Relies on module-level ``datadir``, ``logdir``, ``repodir`` and
    ``logfile``.
    """
    log_message('INFO',
                "Running rdo_auto_release in %s directory" % directory,
                logfile, stdout_only=True)
    # Use a distinct loop variable: the original reused `directory`,
    # shadowing the parameter.
    for path in (directory, datadir, logdir, repodir):
        # exist_ok avoids the race between the old exists() check and
        # makedirs().
        os.makedirs(path, exist_ok=True)
def process_reviews(args):
    """Scan merged OSP release reviews and process any new packages."""
    distroinfo = info.DistroInfo(info_files='rdo.yml',
                                 remote_info=rdoinfo_repo)
    inforepo = distroinfo.get_info()
    # An explicit review count disables the date filter; otherwise look
    # back args.days days.
    if args.number:
        after_fmt = None
    else:
        cutoff = datetime.datetime.now() - datetime.timedelta(days=args.days)
        after_fmt = cutoff.strftime('%Y-%m-%d')
    reviews = review_utils.get_osp_releases_reviews(args.release,
                                                    after=after_fmt,
                                                    number=args.number,
                                                    status='merged')
    for review in reviews:
        rev_num = review['_number']
        log_message('INFO', "Processing review %s" % rev_num, logfile)
        for new_pkg in new_pkgs_review(review, inforepo):
            if new_pkg['osp_release'] != args.release:
                continue
            process_package(new_pkg['name'], new_pkg['version'],
                            new_pkg['osp_release'], args.dry_run,
                            check_tarball=True,
                            chglog_user=args.changelog_user,
                            chglog_email=args.changelog_email,
                            rdoinfo_tag=args.rdoinfo_tag)
def saveCopyFileImg(filename, params, output_folder, last_folder):
    """Copy *filename* into a year/month folder tree derived from the
    oldest date in *params* and return whether the copy succeeded.
    """
    to_folders = {"year": "", "month": "", "day": ""}
    # Bug fix: file_copied was only assigned inside the try block, so any
    # failure before the copy raised NameError at the final check (the
    # original also had an unused, misspelled `files_copied`).
    file_copied = False
    for key, val in params.items():
        if "Dates" in key:
            for i, j in val.items():
                if i == "DateOldest":
                    # Parse once instead of three times.
                    oldest = datetime.datetime.strptime(j, "%Y/%m/%d")
                    to_folders["year"] = oldest.strftime("%Y")
                    to_folders["month"] = oldest.strftime("%B")
                    to_folders["day"] = oldest.strftime("%d")
    destine_dir = str(output_folder + to_folders["year"] + "/" +
                      to_folders["month"] + "/")
    if last_folder:
        destine_dir = str(output_folder + to_folders["year"] + "/" +
                          to_folders["month"] + "/" + last_folder + "/")
    try:
        if not os.path.isdir(destine_dir):
            os.makedirs(os.path.dirname(destine_dir), mode=0o777)
        name = os.path.basename(filename)
        destine_dir = destine_dir + name
        shutil.copy2(filename, destine_dir)
        file_copied = True
    except (IOError, EOFError, ValueError, Exception) as err:
        msg_err = str(sys.exc_info()[0])
        utils.log_message("NOK", ID_PROGRAM, "saveCopyFile", msg_err, err)
    if file_copied:
        msg_result = str("Saved to: (" + destine_dir + ")")
        utils.log_message("OK", ID_PROGRAM, "saveCopyFile", msg_result, "OK")
    return file_copied
def signup():
    """Handle the signup form: create the account and user, send the
    confirmation email, and log the new user in."""
    form = SignUpForm()
    # Guard clause: fall through to the form on GET / invalid POST.
    if not form.validate_on_submit():
        return render_template('auth/signup.html.j2', form=form)
    acct = Account(company_name=form.company_name.data)
    db.session.add(acct)
    db.session.commit()
    user = User(acct_id=acct.id,
                first_name=form.first_name.data,
                last_name=form.last_name.data,
                email=form.email.data,
                password=form.password.data)
    db.session.add(user)
    db.session.commit()
    token = user.generate_token()
    print_debug(url_for('auth.confirm', token=token, _external=True))
    log_message(f'acct_id: {acct.id} just signed up')
    send_email(user.email, 'Confirm Your Account', 'confirm',
               '*****@*****.**', user=user, token=token)
    flash(
        'You have been registered. A confirmation email is sent to your email address. \
You have 24 hours to verify your account.')
    login_user(user)
    return redirect(url_for('dash.index'))
async def ws_handler_async(self, websocket, path):
    """
    Async websocket handler

    Args:
        websocket (Websocket): incoming connection
        path (str): incoming connection resource path
    """
    # new connection
    await self.handler.handle_connection(websocket)
    try:
        async for message in websocket:
            # A negative max_message_len means "no limit"; otherwise the
            # message is truncated before it reaches the handler.
            if self.max_message_len < 0:
                await self.handler.handle_message(websocket, message)
            else:
                await self.handler.handle_message(
                    websocket, message[:self.max_message_len])
    except websockets.exceptions.ConnectionClosed:
        log_message(
            logger,
            f"Exception: ConnectionClosed in websocket {websocket}",
            logging.WARNING)
    finally:
        # websocket disconnects
        await self.handler.handle_disconnect(websocket)
async def stop(self):
    """
    Handles a server shutdown, waits for the handler to do whatever it
    needs to first
    """
    # Give the handler a chance to clean up before stopping the loop.
    await self.handler.handle_shutdown()
    loop = asyncio.get_event_loop()
    loop.stop()
    self.running = False
    log_message(logger, "SHUTDOWN COMPLETE", logging.CRITICAL)
def receive_request(request_file_packet):
    """Build a SendFilePacket for the requested path and send it.

    NOTE(review): uses `self` from the enclosing scope — this is a
    nested handler inside MessageHandler.process.
    """
    rel_path = request_file_packet.file_info.path
    utils.log_message("INFO", "Received request to send file: " + rel_path)
    wrapper = get_wrapper(os.path.join(self.directory.get_path(), rel_path))
    self.object_socket.send_object(
        packets.SendFilePacket(
            packets.FileInfo(path=rel_path, file_wrapper=wrapper)))
    return 0
def removeTags(list_tags):
    """Return a copy of *list_tags* without the blacklisted EXIF tags.

    MakerNote/Copyright/UserComment entries are dropped; each removal is
    logged.
    """
    tags = ("MakerNote", "Copyright", "UserComment")
    new_list = {}
    for key, val in list_tags.items():
        # Idiom fix: `key not in tags` instead of `not key in tags`.
        if key not in tags:
            new_list[key] = val
        else:
            utils.log_message("OK", ID_PROGRAM, "removeTags", "",
                              "Tag " + key + " removed.")
    utils.log_message("OK", ID_PROGRAM, "removeTags", "", "Tags removed.")
    return new_list
def do_login(self, user, directory):
    """Creates a login packet and sends it to the ObjectSocket"""
    utils.log_message("INFO", "Sending login")
    self.directory = Directory(directory)
    root = self.directory.get_path()
    # Describe every entry under the directory (ordering follows
    # directories_after_files — see Directory.list).
    obj_list = [
        packets.FileInfo(path=entry.get_relpath(root), file_wrapper=entry)
        for entry in self.directory.list(directories_after_files=True)
    ]
    login_packet = packets.LoginPacket(
        user, os.path.split(directory)[1], obj_list)
    self.object_socket.send_object(login_packet)
def add_facility():
    """Create a new facility under the current user's account."""
    form = AddFacilityForm()
    # Guard clause: return field errors on invalid input.
    if not form.validate_on_submit():
        return jsonify(form.errors)
    facility = Facility(acct_id=current_user.acct_id, name=form.name.data)
    db.session.add(facility)
    db.session.commit()
    log_message(
        f'user_id: {current_user.id} added facility: {facility.id}')
    flash('Facility created', 'success')
    return jsonify('OK'), 201
def __init__(self, pattern="^!.+"):
    """
    Create a new command handler

    Args:
        pattern (str, optional): regex pattern for a command. Defaults to "^!.+".
    """
    self._regex = re.compile(pattern)
    self.registered_commands = registered
    commands = self.registered_commands.keys()
    log_message(logger, f"Registered Commands: {commands}", logging.INFO)
def login():
    """Log a user in and warn if their account is still unverified."""
    form = LoginForm()
    # Guard clause: show the login form on GET / failed validation.
    if not form.validate_on_submit():
        return render_template('auth/login.html.j2', form=form)
    user = User.get(form.email.data)
    login_user(user)
    log_message(f'user_id: {user.id} logged in')
    if not user.verified:
        url = url_for('auth.resend_confirm')
        link = f'<a href="{url}">Resend</a>'
        flash(f'Your account is still not verified. {link}', 'danger')
    flash('Login Successful', 'success')
    return redirect(url_for('dash.index'))
def resend_confirm():
    """Email the current user a fresh account-confirmation token."""
    token = current_user.generate_token()
    print_debug(url_for('auth.confirm', token=token, _external=True))
    send_email(current_user.email, 'Confirm Your Account', 'confirm',
               '*****@*****.**', user=current_user, token=token)
    flash('A new confirmation email is sent to your email address. \
You have 24 hours to verify your account.')
    log_message(f'user_id: {current_user.id} resent their confirmation email')
    return redirect(url_for('main.index'))
def reset():
    """Password-reset endpoint driven by a signed token in the query
    string; renders the reset form and applies the new password."""
    token = request.args.get('token')
    if not token:
        # Lint fix: these were f-strings with no placeholders.
        log_message('no token attempted reset')
        abort(404)
    try:
        user = User.deserialize(token)
    except SignatureExpired:
        flash('Expired Token', 'danger')
        log_message('expired token reset attempt')
        return redirect(url_for('main.index'))
    except BadSignature:
        flash('Invalid token', 'danger')
        log_message('bad signature reset attempt')
        return redirect(url_for('main.index'))
    form = ResetPasswordForm()
    if form.validate_on_submit():
        log_message(f'user_id: {user.id} changed password')
        user.change_password(form.password.data)
        db.session.add(user)
        db.session.commit()
        flash('Password reset', 'success')
        login_user(user)
        return redirect(url_for('dash.index'))
    return render_template('auth/reset.html.j2', form=form)
def update_facility():
    """Rename a facility belonging to the current user's account."""
    form = UpdateFacilityForm()
    if form.validate_on_submit():
        facility = Facility.query.filter(
            Facility.acct_id == current_user.acct_id,
            Facility.id == form.facility_id.data).first()
        # Robustness fix: .first() returns None for an unknown or
        # foreign facility id, which previously crashed with
        # AttributeError on the next line.
        if facility is None:
            return jsonify('Not Found'), 404
        facility.name = form.name.data
        db.session.add(facility)
        db.session.commit()
        log_message(
            f'user_id: {current_user.id} updated facility: {facility.id}')
        flash('Facility Updated', 'success')
        return jsonify('OK')
    else:
        return jsonify(form.errors)
def process_rdoinfo(args):
    """Process new pinned builds found in rdoinfo for the given release."""
    # Fall back to the release name when no explicit tag was supplied.
    rdoinfo_tag = (args.release if args.rdoinfo_tag is None
                   else args.rdoinfo_tag)
    new_pins = rdoinfo_utils.get_new_pinned_builds(args.rdoinfo_pins,
                                                   rdoinfo_tag)
    for pin in new_pins:
        log_message('INFO', "rdoinfo Found new package %s %s %s" % (
            pin['name'], pin['version'], pin['release']), logfile)
        process_package(pin['name'], pin['version'], args.release,
                        args.dry_run, check_tag=True,
                        chglog_user=args.changelog_user,
                        chglog_email=args.changelog_email,
                        rdoinfo_tag=rdoinfo_tag)
def get(self, item, default=None):
    """
    Get an item from loaded config or default value

    Args:
        item (str): Item to get
        default (any, optional): optional default value. Defaults to None.

    Returns:
        dict: retrieved item
    """
    try:
        # EAFP: a single lookup instead of membership test + index.
        return self.config[item]
    except KeyError:
        log_message(logger,
                    f"Config item {item} not found, using default",
                    logging.WARNING)
        return default
def logout(logout_packet):
    """Receives a logout packet and terminates.

    NOTE(review): uses `self` / `MessageHandler` from the enclosing
    scope — this is a nested handler inside MessageHandler.process.
    """
    utils.log_message("INFO", "Received logout")
    if not logout_packet.is_reply:
        # Acknowledge the peer's logout with a reply packet.
        reply = packets.LogoutPacket(True, logout_packet.is_busy)
        self.object_socket.send_object(reply)
    if logout_packet.is_busy:
        utils.log_message(
            "ERROR",
            "Another user is already synchronizing this directory...")
    elif logout_packet.is_reply:
        # Session ended normally: release the directory lock. Fix: use a
        # `with` block so the lock is released even if remove() raises
        # (the manual acquire/release would otherwise leave it held).
        directory_path = self.directory.get_path()
        with MessageHandler.locked_directories_lock:
            MessageHandler.locked_directories.remove(directory_path)
    # -1 tells the dispatch loop to stop processing.
    return -1
async def send(self, response, websocket):
    """Send a response to a websocket

    Args:
        response (Response): The Response to send
        websocket (Websocket): The websocket to send the Response to
    """
    if not isinstance(response, Response):
        # Consistency fix: every other call site in this file passes
        # `logger` as the first argument to log_message; these two calls
        # omitted it.
        log_message(
            logger,
            f"Outgoing: {response} is not of type Response, preventing send",
            logging.CRITICAL)
        return
    if response.data["origin"] == Origin.DEFAULT:
        # Lint fix: was an f-string with no placeholders.
        log_message(logger, "Outgoing response has DEFAULT origin",
                    logging.WARNING)
    await websocket.send(response.json())
def parse_common_crawl(self, link_metadata: List[str],
                       link_to_check: str) -> List[str]:
    """ Parse common crawl URL

    Queries each index location in *link_metadata* for URLs under
    *link_to_check* and returns the deduplicated list (order preserved).
    """
    final_links: List[str] = []
    seen = set()  # O(1) dedup instead of O(n) list membership per URL
    url = f"{link_to_check}/*"
    for loc in link_metadata:
        link_location = f"{loc}?output=json&fl=url&url="
        try:
            req = rq.get(link_location + url)
            for line in req.text.split("\n"):
                # Bug fix: the trailing/blank lines from split("\n")
                # raised JSONDecodeError, which aborted the remaining
                # URLs for this location and logged a spurious error.
                if not line:
                    continue
                url_info = json.loads(line)["url"]
                if url_info not in seen:
                    seen.add(url_info)
                    final_links.append(url_info)
        except Exception:
            message = f"Seeding error for link : {link_location + url}"
            log_message(message, 'sed')
    return final_links
def read_csv_file(file_name, sep_of_file):
    """Read a CSV file into a DataFrame.

    Returns the DataFrame on success; logs the exception and returns
    None on failure (preserving the original silent-None contract).
    """
    logger = log_message()
    try:
        csv_data = pd.read_csv(file_name, sep=sep_of_file)
    except Exception:
        # Bug fix: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt.
        logger.exception(
            'Issue occurred while reading the content of {} table'.format(
                file_name))
        return None
    logger.info(
        'Successfully read the CSV file named {}'.format(file_name))
    return csv_data
def main():
    '''Starts execution once everything is loaded'''
    hostname = sys.argv[1]
    port = int(sys.argv[2])
    username = sys.argv[3]
    directory = sys.argv[4].rstrip('/')
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        client_socket.connect((hostname, port))
    except Exception:
        utils.log_message("ERROR",
                          "PyBox is currently unavailable due to socket error")
        client_socket.close()
        return
    # Wrap the raw socket and run the synchronization protocol.
    message_handler = MessageHandler(ObjectSocket(client_socket))
    message_handler.do_login(username, directory)
    message_handler.process()
def logout(logout_packet):
    """Receives a logout packet and terminates"""
    utils.log_message("INFO", "Received logout")
    is_reply = logout_packet.is_reply
    is_busy = logout_packet.is_busy
    # Acknowledge the peer's logout if this was not already a reply.
    if not is_reply:
        self.object_socket.send_object(packets.LogoutPacket(True, is_busy))
    if is_busy:
        utils.log_message(
            "ERROR",
            "Another user is already synchronizing this directory...")
    elif is_reply:
        # Release the per-directory lock held since login.
        directory_path = self.directory.get_path()
        MessageHandler.locked_directories_lock.acquire()
        MessageHandler.locked_directories.remove(directory_path)
        MessageHandler.locked_directories_lock.release()
    return -1
def read_excel_file(file_name, sheet_name):
    """Read an Excel sheet into a DataFrame.

    Returns the DataFrame on success; logs the exception and returns
    None on failure (preserving the original silent-None contract).
    """
    logger = log_message()
    try:
        excel_data = pd.read_excel(file_name, sheet_name)
    except Exception:
        # Bug fix: bare `except:` replaced with `except Exception:`.
        logger.exception(
            'Issue occurred while reading the content of {} table'.format(
                file_name))
        return None
    logger.info(
        'Successfully read the Excel file named {}'.format(file_name))
    return excel_data
def move(self, destination):
    """Move the directory to *destination*, adopting an existing
    directory at that path or replacing an existing file."""
    destination = os.path.normpath(destination)
    if utils.DEBUG_LEVEL >= 1:
        utils.log_message("DEBUG",
                          "Moving directory to path " + str(destination))
    if os.path.exists(destination):
        if os.path.isdir(destination):
            # Destination directory already exists: remove the source
            # (os.rmdir only succeeds when it is empty) and adopt the
            # destination path.
            os.rmdir(self.get_path())
            self.path = destination
            return
        else:
            os.remove(destination)
    # Ensure the parent directory exists without masking other errors
    # (the previous blanket try/except also hid e.g. permission errors).
    parent = os.path.split(destination)[0]
    if parent:
        os.makedirs(parent, exist_ok=True)
    shutil.move(self.get_path(), destination)
    self.path = destination
def main():
    '''Starts execution once everything is loaded'''
    hostname, port = sys.argv[1], int(sys.argv[2])
    username = sys.argv[3]
    directory = sys.argv[4].rstrip('/')
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        client_socket.connect((hostname, port))
    except Exception:
        utils.log_message(
            "ERROR", "PyBox is currently unavailable due to socket error")
        client_socket.close()
        return
    object_socket = ObjectSocket(client_socket)
    handler = MessageHandler(object_socket)
    handler.do_login(username, directory)
    handler.process()
def new_pkgs_review(review, inforepo):
    """Return the list of new packages introduced by a releases review."""
    review_number = review['_number']
    log_message('INFO', "Processing releases for review %s" % review_number,
                logfile)
    new_pkgs = []
    for release in releases_utils.get_new_releases_review(review):
        for repo in release['repos']:
            log_message('INFO', "%s Found new repo version %s %s" % (
                review_number, repo, release['version']), logfile)
            pkg = query.find_package(inforepo, repo, strict=True)
            if not pkg:
                # Some openstack packages are special and name in RDO !=
                # that repo name, i.e.: oslo.log vs oslo-log
                repo_url = 'git://git.openstack.org/%s' % repo
                pkg = query.find_package(inforepo, repo_url, strict=True)
            if not pkg:
                continue
            log_message('INFO', "%s Found new package %s %s" % (
                review_number, pkg['name'], release['version']), logfile)
            new_pkgs.append({'name': pkg['name'],
                             'version': release['version'],
                             'osp_release': release['release']})
    return new_pkgs
def findFiles(rootFolder, typesFiles, output_folder):
    """Walk *rootFolder*, copy every image with a matching extension into
    *output_folder* and return the list of files that were processed."""
    list_files = []
    last_folder = ""
    result = False
    print("Escanning folder: " + rootFolder)
    for root, dirs, files in os.walk(rootFolder):
        for name in files:
            file_to_write = os.path.join(root, name)
            # Guard clause: skip (and log) unrecognized extensions.
            if not file_to_write.endswith(tuple(typesFiles)):
                utils.log_message("NOK", ID_PROGRAM, "findFiles",
                                  "File format not identified: ",
                                  file_to_write)
                continue
            params = removeTags(getParamsImg(file_to_write))
            last_folder = os.path.basename(os.path.normpath(root))
            if last_folder == rootFolder:
                last_folder = ""
            try:
                result = saveCopyFileImg(file_to_write, params,
                                         output_folder, last_folder)
                list_files.append(file_to_write)
            except Exception as identifier:
                utils.log_message("NOK", ID_PROGRAM, "findFiles",
                                  "File not copied: " + file_to_write,
                                  str(identifier))
                continue
    # NOTE: `result` reflects only the last copy attempt.
    if result:
        utils.log_message("OK", ID_PROGRAM, "findFiles", "Finished",
                          str(result))
    return list_files
def receive_object(self): """ Reds a packet object from the socket. @return: A packet object, you can distinct them using #instanceof. None of nothing was read, probably because the connection was shutdown. @rtype: LoginPacket or FileChangedPacket or RequestFilePacket or SendFilePacket """ # Read the header header = bytearray(1) bytes_read = self.socket.recv_into(header) if bytes_read == 0: return None # Parse the packet id and decode it packet_id = bytes_to_char(header) if utils.DEBUG_LEVEL >= 2: utils.log_message("DEBUG", "Packet id: " + str(packet_id)) for clazz in self.PACKET_CLASSES: if packet_id == clazz.ID: return clazz.decode(self.socket) return None
def read_table(env, table):
    """Fetch all rows plus column names from *table* in the given env.

    Returns [rows, column_names] on success; logs and returns None on
    failure.
    """
    logger = log_message()
    # Bug fix: `connection` was unbound in `finally` when
    # mysql_connection() itself raised, turning the failure into a
    # NameError.
    connection = None
    try:
        connection = mysql_connection(env)
        cursor = connection.cursor()
        # NOTE(review): the table name is interpolated into the SQL —
        # callers must ensure it is trusted.
        cursor.execute('select * from {}'.format(table))
        column_names = cursor.column_names
        logger.info('Successfully read the {} table'.format(table))
        return [cursor.fetchall(), column_names]
    except Exception:
        # Bug fix: bare `except:` replaced with `except Exception:`.
        logger.exception(
            'Issue occurred while reading the content of {} table'.format(
                table))
    finally:
        if connection is not None:
            connection.close()
def receive_object(send_file_packet):
    """Receives a send file packet, and processes it"""
    info = send_file_packet.file_info
    utils.log_message("INFO", "Receiving object: " + info.path)
    target = os.path.join(self.directory.get_path(), info.path)
    info.file_wrapper.move(target)
    # Restore the sender's modification time on the received object.
    info.file_wrapper.set_timestamp(info.last_modified)
    if utils.DEBUG_LEVEL >= 3:
        utils.log_message("DEBUG", "Object has been moved to: " +
                          str(info.file_wrapper.get_path()))
        utils.log_message("DEBUG", "Timestamp has been set to: " + str(
            utils.format_timestamp(info.file_wrapper.get_timestamp())))
    return 0
def getParamsImg(file_path):
    """Extract EXIF parameters and relevant dates from an image file.

    Returns a dict with the decoded EXIF tags, a "Dates" sub-dict and a
    "Msg" entry describing any problem encountered.
    """
    result = {}
    # time of most recent content modification
    recent_mod_file = datetime.datetime.fromtimestamp(
        os.stat(file_path).st_mtime).strftime("%Y/%m/%d")
    # platform dependent; time of most recent metadata change on Unix,
    # or the time of creation on Windows
    recent_meta_file = datetime.datetime.fromtimestamp(
        os.stat(file_path).st_ctime).strftime("%Y/%m/%d")
    dates = {
        "DateRecentModification": recent_mod_file,
        "DateRecenteMetadaCreation": recent_meta_file
    }
    result["FilePath"] = file_path
    result["Msg"] = ""
    msg_err = ""
    try:
        working_img = Image.open(file_path)
        working_img.verify()
        # verify() leaves the image object unusable, so reopen before
        # reading EXIF.
        working_img = Image.open(file_path)
        exif = working_img._getexif()
        working_img.close()
        if not exif:
            result["Msg"] = "Exif data not found."
        else:
            for key, val in exif.items():
                decoded = TAGS.get(key, key)
                if key in TAGS:
                    if "Date" in decoded:
                        val = datetime.datetime.strptime(
                            val, "%Y:%m:%d %H:%M:%S").strftime("%Y/%m/%d")
                        dates[decoded] = val
                    else:
                        result[decoded] = val
                else:
                    msg_err = "No TAG Identified: " + str(
                        key) + " | decoded: " + str(decoded)
    except Exception as err:
        # Bug fix: the original had a second `except ValueError` clause
        # that was unreachable because the first clause already listed
        # Exception (which includes ValueError).
        msg_err = str(sys.exc_info()[0])
        msg_err = str(file_path) + str("|") + str(err) + str("|") + str(
            msg_err)
        utils.log_message("NOK", ID_PROGRAM, "getParameters", "", msg_err)
    finally:
        if not result["Msg"]:
            result["Msg"] = msg_err
        dates["DateOldest"] = setOldestDate(dates)
        result["Dates"] = dates
    if result:
        utils.log_message("OK", ID_PROGRAM, "getParameters", "",
                          "Parameters OK.")
    return result
def decode(socket):
    """Read a LogoutPacket (two boolean flags) from the socket."""
    if utils.DEBUG_LEVEL >= 3:
        utils.log_message("DEBUG", "Decode logout packet")
    payload = bytearray(2)
    socket.recv_into(payload, flags=MSG_WAITALL)
    is_reply = byte_utils.bytes_to_boolean(payload, 0)
    is_busy = byte_utils.bytes_to_boolean(payload, 1)
    if utils.DEBUG_LEVEL >= 3:
        utils.log_message("DEBUG", "Is reply: " + str(is_reply))
        utils.log_message("DEBUG", "Is busy: " + str(is_busy))
    return LogoutPacket(is_reply, is_busy)
def decode(socket):
    """Read a RequestFilePacket: a path-length byte then the path."""
    length_buf = bytearray(1)
    socket.recv_into(length_buf, flags=MSG_WAITALL)
    file_path_length = byte_utils.bytes_to_char(length_buf, 0)
    path_buf = bytearray(file_path_length)
    socket.recv_into(path_buf, flags=MSG_WAITALL)
    file_path = byte_utils.bytes_to_string(path_buf, file_path_length, 0)
    if utils.DEBUG_LEVEL >= 3:
        utils.log_message("DEBUG", "Decoded request file packet: ")
        utils.log_message("DEBUG",
                          "File path length: " + str(file_path_length))
        utils.log_message("DEBUG", "File Path: " + str(file_path))
    return RequestFilePacket(FileInfo(file_path))
def receive_object(send_file_packet):
    """Receives a send file packet, and processes it"""
    file_info = send_file_packet.file_info
    utils.log_message("INFO", "Receiving object: " + file_info.path)
    destination = os.path.join(self.directory.get_path(), file_info.path)
    wrapper = file_info.file_wrapper
    wrapper.move(destination)
    wrapper.set_timestamp(file_info.last_modified)
    if utils.DEBUG_LEVEL >= 3:
        utils.log_message(
            "DEBUG",
            "Object has been moved to: " + str(wrapper.get_path()))
        utils.log_message(
            "DEBUG",
            "Timestamp has been set to: " + str(
                utils.format_timestamp(wrapper.get_timestamp())))
    return 0
def decode(socket):
    """Read a LoginPacket: username, directory name and the file list."""
    fixed = bytearray(6)
    socket.recv_into(fixed, flags=MSG_WAITALL)
    username_length = byte_utils.bytes_to_char(fixed, 0)
    directory_name_length = byte_utils.bytes_to_char(fixed, 1)
    files_count = byte_utils.bytes_to_unsigned_int(fixed, 2)
    dynamic = bytearray(username_length + directory_name_length)
    socket.recv_into(dynamic, flags=MSG_WAITALL)
    username = byte_utils.bytes_to_string(dynamic, username_length, 0)
    directory_name = byte_utils.bytes_to_string(dynamic,
                                                directory_name_length,
                                                username_length)
    if utils.DEBUG_LEVEL >= 3:
        utils.log_message("DEBUG", "Decoded login packet: ")
        utils.log_message("DEBUG",
                          "Username length: " + str(username_length))
        utils.log_message("DEBUG", "Directory name length: " +
                          str(directory_name_length))
        utils.log_message("DEBUG", "Files count: " + str(files_count))
        # Bug fix: this statement was corrupted in the source
        # ('"Username: "******"DEBUG"...' is a syntax error) —
        # reconstructed to match the sibling debug statements.
        utils.log_message("DEBUG", "Username: " + str(username))
        utils.log_message("DEBUG",
                          "Directory name: " + str(directory_name))
        utils.log_message("DEBUG", "Files: ")
    # Parse all file info
    files = []
    for count in range(files_count):
        if utils.DEBUG_LEVEL >= 2:
            utils.log_message("DEBUG", "Waiting for file info " +
                              str(count) + "/" + str(files_count))
        fixed = bytearray(6)
        socket.recv_into(fixed, flags=MSG_WAITALL)
        file_path_length = byte_utils.bytes_to_char(fixed, 0)
        file_is_directory = byte_utils.bytes_to_boolean(fixed, 1)
        file_last_modified = byte_utils.bytes_to_unsigned_int(fixed, 2)
        strings = bytearray(file_path_length)
        socket.recv_into(strings, flags=MSG_WAITALL)
        file_path = byte_utils.bytes_to_string(strings, file_path_length, 0)
        if utils.DEBUG_LEVEL >= 3:
            utils.log_message("DEBUG",
                              "File path length: " + str(file_path_length))
            utils.log_message("DEBUG",
                              "Is directory: " + str(file_is_directory))
            utils.log_message("DEBUG", "File timestamp: " + str(
                utils.format_timestamp(file_last_modified)))
            utils.log_message("DEBUG", "File path: " + str(file_path))
        files.append(FileInfo(file_path, file_is_directory,
                              file_last_modified))
    return LoginPacket(username, directory_name, files)
def decode(socket):
    """Read a SendFilePacket header and, for regular files, stream the
    file contents into a temporary File wrapper.

    Wire format: 6-byte fixed header (path length, is-directory flag,
    last-modified), then a 4-byte size for regular files, then the path
    string, then the raw file bytes.
    """
    fixed = bytearray(6)
    socket.recv_into(fixed, flags=MSG_WAITALL)
    file_path_length = byte_utils.bytes_to_char(fixed, 0)
    file_is_directory = byte_utils.bytes_to_boolean(fixed, 1)
    file_last_modified = byte_utils.bytes_to_unsigned_int(fixed, 2)
    file_size = None
    # Only regular files carry a size field.
    if not file_is_directory:
        fixed = bytearray(4)
        socket.recv_into(fixed, flags=MSG_WAITALL)
        file_size = byte_utils.bytes_to_unsigned_int(fixed, 0)
    strings = bytearray(file_path_length)
    socket.recv_into(strings, flags=MSG_WAITALL)
    file_path = byte_utils.bytes_to_string(strings, file_path_length, 0)
    if utils.DEBUG_LEVEL >= 3:
        utils.log_message("DEBUG", "Decoded send file packet: ")
        utils.log_message("DEBUG",
                          "File path length: " + str(file_path_length))
        utils.log_message("DEBUG",
                          "Is directory: " + str(file_is_directory))
        utils.log_message("DEBUG", "Last modified: " +
                          str(utils.format_timestamp(file_last_modified)))
        utils.log_message("DEBUG", "File size: " + str(file_size))
        utils.log_message("DEBUG", "File Path: " + str(file_path))
    # parse file's contents to File().write() 1024 chunks if is not directory
    if not file_is_directory:
        # Never request more than the bytes still outstanding.
        chunk_size = min(SendFilePacket.CHUNK_SIZE, file_size)
        remaining = file_size
        file_wrapper = File()
        received_bytes_acc = 0
        while remaining > 0:
            if utils.DEBUG_LEVEL >= 3:
                utils.log_message("DEBUG", "Chunk size: " + str(chunk_size))
            chunk = bytearray(chunk_size)
            received_bytes = socket.recv_into(chunk, flags=MSG_WAITALL)
            received_bytes_acc += received_bytes
            # NOTE(review): the whole chunk buffer is written even if
            # received_bytes < chunk_size (possible if the peer closes
            # mid-transfer), padding the file with zeros; and a 0-byte
            # recv would loop forever — confirm upstream guarantees.
            file_wrapper.write(chunk)
            remaining -= received_bytes
            chunk_size = min(chunk_size, remaining)
        file_wrapper.close()
        if utils.DEBUG_LEVEL >= 1:
            utils.log_message("DEBUG", "File size is " + str(file_size) +
                              " and received bytes are " +
                              str(received_bytes_acc))
            utils.log_message("DEBUG", "File is located in " +
                              str(file_wrapper.get_path()))
    else:
        file_wrapper = Directory()
    packet = SendFilePacket(FileInfo(file_path, file_is_directory,
                                     file_last_modified, file_size,
                                     file_wrapper))
    return packet
def process(self):
    """Processes the next message in queue. If no message is in queue, it
    awaits until one is and then processes it"""

    def receive_login(login_packet):
        """Receives a login packet and processes it, creating send_object
        and request_object packets as needed to synchronize"""

        def request_object(info):
            """Creates a request file packet and sends it to the ObjectSocket"""
            utils.log_message("INFO",
                              "Requesting file/directory: " + info.path)
            request_file_packet = packets.RequestFilePacket(info)
            self.object_socket.send_object(request_file_packet)

        def send_object(info):
            """Creates a send object packet and sends it to the ObjectSocket"""
            utils.log_message("INFO", "Sending file/directory: " + info.path)
            send_file_packet = packets.SendFilePacket(info)
            self.object_socket.send_object(send_file_packet)

        utils.log_message("INFO", "Receiving login")
        # Server-side mirror directory is named "<username>-<dirname>".
        self.directory = Directory(login_packet.username + "-" +
                                   login_packet.directory_name)
        # If directory is already being synchronized, disconnect
        directory_path = self.directory.get_path()
        MessageHandler.locked_directories_lock.acquire()
        if directory_path in MessageHandler.locked_directories:
            MessageHandler.locked_directories_lock.release()
            # Busy logout: another session holds this directory.
            logout_packet = packets.LogoutPacket(False, True)
            self.object_socket.send_object(logout_packet)
            return 0
        else:
            MessageHandler.locked_directories.append(directory_path)
            MessageHandler.locked_directories_lock.release()
        # Snapshot of the local tree (ordering per
        # directories_after_files — see Directory.list).
        local_files = []
        for file_iterator in self.directory.list(directories_after_files=True):
            local_files.append(packets.FileInfo(
                path=file_iterator.get_relpath(self.directory.get_path()),
                file_wrapper=file_iterator))
        request_files = []
        send_files = []
        # Decide what to push: local entries "greater" than their remote
        # match, or with no remote match at all. The 60-second window
        # appears to limit pushes to recently-touched entries —
        # NOTE(review): confirm intent.
        for local in local_files:
            found_match = False
            for remote in login_packet.files:
                if remote == local:
                    found_match = True
                    if local > remote and (isinstance(local, Directory) or
                                           (local.get_timestamp() >=
                                            int(utils.get_timestamp() - 60))):
                        send_files.append(local)
                    break
            if not found_match and (isinstance(local, Directory) or
                                    (local.get_timestamp() >=
                                     int(utils.get_timestamp() - 60))):
                send_files.append(local)
        # Decide what to pull: the symmetric check for remote entries.
        for remote in login_packet.files:
            found_match = False
            for local in local_files:
                if local == remote:
                    found_match = True
                    if remote > local and (isinstance(remote, Directory) or
                                           (remote.get_timestamp() >=
                                            int(utils.get_timestamp() - 60))):
                        request_files.append(remote)
                    break
            if not found_match and (isinstance(remote, Directory) or
                                    (remote.get_timestamp() >=
                                     int(utils.get_timestamp() - 60))):
                request_files.append(remote)
        for request in request_files:
            request_object(request)
        for send in send_files:
            send_object(send)
        # A non-reply, non-busy logout tells the peer we are done.
        logout_packet = packets.LogoutPacket(False, False)
        self.object_socket.send_object(logout_packet)
        return 0

    def receive_request(request_file_packet):
        """Creates a send object packet and sends it to the ObjectSocket"""
        path = request_file_packet.file_info.path
        utils.log_message("INFO", "Received request to send file: " + path)
        abs_path = os.path.join(self.directory.get_path(), path)
        obj = get_wrapper(abs_path)
        info = packets.FileInfo(path=path, file_wrapper=obj)
        send_file_packet = packets.SendFilePacket(info)
        self.object_socket.send_object(send_file_packet)
        return 0

    def receive_object(send_file_packet):
        """Receives a send file packet, and processes it"""
        info = send_file_packet.file_info
        utils.log_message("INFO", "Receiving object: " + info.path)
        info.file_wrapper.move(os.path.join(self.directory.get_path(),
                                            info.path))
        # Restore the sender's modification time.
        info.file_wrapper.set_timestamp(info.last_modified)
        if utils.DEBUG_LEVEL >= 3:
            utils.log_message("DEBUG", "Object has been moved to: " +
                              str(info.file_wrapper.get_path()))
            utils.log_message("DEBUG", "Timestamp has been set to: " +
                              str(utils.format_timestamp(
                                  info.file_wrapper.get_timestamp())))
        return 0

    def logout(logout_packet):
        """Receives a logout packet and terminates"""
        utils.log_message("INFO", "Received logout")
        if not logout_packet.is_reply:
            # Acknowledge the peer's logout.
            out_logout_packet = packets.LogoutPacket(True,
                                                     logout_packet.is_busy)
            self.object_socket.send_object(out_logout_packet)
        if logout_packet.is_busy:
            utils.log_message("ERROR",
                              "Another user is already synchronizing this directory...")
        elif logout_packet.is_reply:
            # Normal end of session: release the directory lock.
            directory_path = self.directory.get_path()
            MessageHandler.locked_directories_lock.acquire()
            MessageHandler.locked_directories.remove(directory_path)
            MessageHandler.locked_directories_lock.release()
        # -1 tells the dispatch loop below to terminate.
        return -1

    # Dispatch table: packet type -> handler; handlers return 0 to keep
    # processing, -1 to stop.
    packet_actions = {
        packets.LoginPacket: receive_login,
        packets.RequestFilePacket: receive_request,
        packets.SendFilePacket: receive_object,
        packets.LogoutPacket: logout
    }
    while True:
        packet_object = self.object_socket.receive_object()
        # NOTE(review): if receive_object() returns None (connection
        # closed), no handler matches and this loops indefinitely —
        # confirm upstream behavior.
        for packet_type in packet_actions:
            if isinstance(packet_object, packet_type):
                if packet_actions[packet_type](packet_object) == -1:
                    utils.log_message("INFO", "Logging out")
                    return
                break
def receive_login(login_packet):
    """Receives a login packet and processes it, creating send_object
    and request_object packets as needed to synchronize"""

    def request_object(info):
        """Creates a request file packet and sends it to the ObjectSocket"""
        utils.log_message("INFO", "Requesting file/directory: " + info.path)
        request_file_packet = packets.RequestFilePacket(info)
        self.object_socket.send_object(request_file_packet)

    def send_object(info):
        """Creates a send object packet and sends it to the ObjectSocket"""
        utils.log_message("INFO", "Sending file/directory: " + info.path)
        send_file_packet = packets.SendFilePacket(info)
        self.object_socket.send_object(send_file_packet)

    utils.log_message("INFO", "Receiving login")
    # Server-side mirror directory is named "<username>-<dirname>".
    self.directory = Directory(login_packet.username + "-" +
                               login_packet.directory_name)
    # If directory is already being synchronized, disconnect
    directory_path = self.directory.get_path()
    MessageHandler.locked_directories_lock.acquire()
    if directory_path in MessageHandler.locked_directories:
        MessageHandler.locked_directories_lock.release()
        # Busy logout: another session holds this directory.
        logout_packet = packets.LogoutPacket(False, True)
        self.object_socket.send_object(logout_packet)
        return 0
    else:
        MessageHandler.locked_directories.append(directory_path)
        MessageHandler.locked_directories_lock.release()
    # Snapshot of the local tree (ordering per directories_after_files).
    local_files = []
    for file_iterator in self.directory.list(directories_after_files=True):
        local_files.append(packets.FileInfo(
            path=file_iterator.get_relpath(self.directory.get_path()),
            file_wrapper=file_iterator))
    request_files = []
    send_files = []
    # Push: local entries "greater" than their remote match, or missing
    # remotely. The 60-second window appears to limit this to
    # recently-touched entries — NOTE(review): confirm intent.
    for local in local_files:
        found_match = False
        for remote in login_packet.files:
            if remote == local:
                found_match = True
                if local > remote and (isinstance(local, Directory) or
                                       (local.get_timestamp() >=
                                        int(utils.get_timestamp() - 60))):
                    send_files.append(local)
                break
        if not found_match and (isinstance(local, Directory) or
                                (local.get_timestamp() >=
                                 int(utils.get_timestamp() - 60))):
            send_files.append(local)
    # Pull: the symmetric check for remote entries.
    for remote in login_packet.files:
        found_match = False
        for local in local_files:
            if local == remote:
                found_match = True
                if remote > local and (isinstance(remote, Directory) or
                                       (remote.get_timestamp() >=
                                        int(utils.get_timestamp() - 60))):
                    request_files.append(remote)
                break
        if not found_match and (isinstance(remote, Directory) or
                                (remote.get_timestamp() >=
                                 int(utils.get_timestamp() - 60))):
            request_files.append(remote)
    for request in request_files:
        request_object(request)
    for send in send_files:
        send_object(send)
    # A non-reply, non-busy logout tells the peer we are done.
    logout_packet = packets.LogoutPacket(False, False)
    self.object_socket.send_object(logout_packet)
    return 0
def request_object(info):
    """Creates a request file packet and sends it to the ObjectSocket"""
    utils.log_message("INFO", "Requesting file/directory: " + info.path)
    self.object_socket.send_object(packets.RequestFilePacket(info))
def send_object(info):
    """Creates a send object packet and sends it to the ObjectSocket"""
    utils.log_message("INFO", "Sending file/directory: " + info.path)
    self.object_socket.send_object(packets.SendFilePacket(info))
def process_package(name, version, osp_release, dry_run, check_tag=False,
                    check_tarball=False, chglog_user=None,
                    chglog_email=None, rdoinfo_tag=None):
    """Process a single package release: clone distgit, bump the version
    and (unless dry_run) send the review."""
    log_message('INFO', "Processing package %s version %s for release %s" %
                (name, version, osp_release), logfile)
    if rdoinfo_tag is None:
        rdoinfo_tag = osp_release
    try:
        rdoinfo_pin = rdoinfo_utils.get_pin(name, rdoinfo_tag)
        if rdoinfo_pin and rdoinfo_pin != version:
            log_message('INFO',
                        "Package %s pinned to version %s in rdoinfo" %
                        (name, rdoinfo_pin), logfile)
            return
        clone_distgit(name, osp_release)
        if check_tag and not is_release_tag(name, version):
            log_message('INFO', "Package %s has not release tag %s" %
                        (name, version), logfile)
            return
        old_evr = get_evr(name)
        # First pass in dry-run mode to compute the would-be EVR.
        new_vers = new_version(name, version, osp_release, dry_run=True,
                               chglog_user=chglog_user,
                               chglog_email=chglog_email)
        if new_vers_stderr(new_vers.stderr):
            log_message('INFO', new_vers_stderr(new_vers.stderr).group(1),
                        logfile)
        new_evr = get_evr(name)
        if not is_newer(new_evr, old_evr):
            log_message('INFO',
                        "Version %s is not newer that existing %s" %
                        (new_evr, old_evr), logfile)
            return
        if check_tarball and not wait_for_tarball(name):
            # NOTE(review): only logs — processing still continues even
            # when the tarball is missing; confirm this is intentional.
            tag_exists = is_release_tag(name, version)
            log_message('INFO', "Tarball for %s %s is not ready yet, "
                        "Tag exists: %s" % (name, version, tag_exists),
                        logfile)
        log_message('INFO', "Sending review for package %s version %s" %
                    (name, version), logfile)
        new_version(name, version, osp_release, dry_run=dry_run,
                    chglog_user=chglog_user, chglog_email=chglog_email)
        if dry_run:
            log_message('INFO',
                        "Running in dry-run mode. Review is not sent",
                        logfile)
    except NotBranchedPackage as e:
        # Project exception; .message is assumed to be defined on it.
        log_message('INFO', "Package %s %s for %s is not required: %s" %
                    (name, version, osp_release, e.message), logfile)
    except NotInRdoinfoRelease:
        log_message('INFO', "Package %s is not in release %s" %
                    (name, osp_release), logfile)
    except Exception as e:
        # Bug fix: generic exceptions have no `.message` attribute on
        # Python 3, which turned any failure into an AttributeError.
        # Also use bare `raise` to preserve the original traceback.
        log_message('ERROR', "Package %s %s for %s failed to build: %s" %
                    (name, version, osp_release, str(e)), logfile)
        raise