def add_site(auth, fields):
    """Authenticate the caller and create a new Site via OpenStack.

    :param auth: dict carrying 'username' and 'password'; mutated to add
        a 'tenant' key derived from the caller's home site.
    :param fields: keyword fields forwarded to the Site constructor.
    :return: the saved Site instance.
    """
    caller = authenticate(
        username=auth.get('username'),
        password=auth.get('password'),
    )
    # The OpenStack tenant is taken from the authenticated user's site.
    auth['tenant'] = caller.site.login_base
    new_site = Site(**fields)
    new_site.os_manager = OpenStackManager(auth=auth, caller=caller)
    new_site.save()
    return new_site
def site_create(**new_site_data):
    """Build and persist a Site from keyword fields.

    Required keys: ``name``, ``description``, ``url``, ``path``.
    ``local_path`` is initialized as a mirror of ``path``.

    :return: the saved Site instance.
    """
    site = Site()
    site.name = new_site_data['name']
    site.description = new_site_data['description']
    site.url = new_site_data['url']
    site.path = new_site_data['path']
    # At creation time the local path simply mirrors the site path.
    site.local_path = site.path
    site.save()
    return site
def create(self):
    """Create and save a Site model from this TOSCA node template.

    Builds the constructor arguments via ``get_xos_args()``, records the
    requesting user as the caller, saves, then runs post-processing.
    """
    # Removed unused locals (nodetemplate/siteName were assigned but
    # never read).
    xos_args = self.get_xos_args()
    site = Site(**xos_args)
    # Record who performed the create for XOS permission checks.
    site.caller = self.user
    site.save()
    self.postprocess(site)
    self.info("Created Site '%s'" % (str(site), ))
def push_to_queue(**ka):
    '''
    Inserts a single job item into the work queue, unless an identical
    job (same type, data_integer, blog, and site) is already queued.

    Required keyword arguments (looked up with ``ka[...]``, so a missing
    key raises KeyError):

    :param job_type: A string representing the type of job to be
       inserted. 'Page','Index', eventually 'Fileinfo'
    :param data_integer: Any integer data passed along with the job. For
       a job control item, this is the number of items remaining for
       that particular job.
    :param blog: The blog object associated with the job.
    :param site: The site object associated with the job.

    Optional keyword arguments:

    :param priority: An integer, from 0-9, representing the processing
       priority associated with the job. Higher-priority jobs are
       processed first. Most individual pages are given a high priority;
       indexes are lower. Defaults to 9.
    :param is_control: True for a job-control item. Defaults to False.
    '''
    try:
        queue_job = Queue.get(
            Queue.job_type == ka['job_type'],
            Queue.data_integer == ka['data_integer'],
            Queue.blog == ka['blog'],
            Queue.site == ka['site'])
    except Queue.DoesNotExist:
        queue_job = Queue()
    else:
        # An identical job is already queued; don't insert a duplicate.
        return

    blog = ka['blog']
    site = ka['site']

    queue_job.job_type = ka['job_type']
    # Bug fix: was int(ka.get('data_integer', None)), which raises
    # TypeError whenever the default fires; the key is mandatory anyway
    # (it was already read with ka['data_integer'] above).
    queue_job.data_integer = int(ka['data_integer'])
    # Bug fix: was ka.get('blog', Blog()).id / ka.get('site', Site()).id,
    # constructing throwaway model instances for defaults that can never
    # apply (the keys are mandatory).
    queue_job.blog = blog.id
    queue_job.site = site.id
    queue_job.priority = ka.get('priority', 9)
    queue_job.is_control = ka.get('is_control', False)

    if queue_job.is_control:
        # Use the caller's blog object directly; queue_job.blog now holds
        # a bare id, so reading .for_log through it would force the ORM
        # to re-fetch the row.
        queue_job.data_string = (
            queue_job.job_type + ": Blog {}".format(blog.for_log))
    else:
        queue_job.data_string = (
            queue_job.job_type + ": " +
            FileInfo.get(FileInfo.id == queue_job.data_integer).file_path)

    queue_job.date_touched = datetime.datetime.now()
    queue_job.save()
def create_site(request):
    """POST handler: validate SiteSerializer data and persist a Site.

    Returns 201 with the serialized payload on success, 400 with the
    validation errors otherwise.  Non-POST requests fall through and
    return None, matching the original behavior.
    """
    if request.method == "POST":
        serializer = SiteSerializer(data=request.data)
        # Guard clause: reject invalid payloads immediately.
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        timestamp = str(datetime.today().timestamp())
        # Per-user screenshot filename, made unique by the timestamp.
        image_name = "%s_%s_image.png" % (request.user.username, timestamp)
        new_site = Site(
            category=Category.objects.get(
                name=serializer.data["category"]),
            deadline=default_date(),
            image_path=image_name,
            url=serializer.data["url"],
            user=request.user,
        )
        new_site.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
def save_site(data):
    """Create or update a Site content type node.

    If the form carries a hidden node id/version, the call is treated as
    an update of the existing content; otherwise a new node is
    registered and a Site row is created, committed once to obtain its
    id, hashed, and committed again with the hashes filled in.

    :param data: dict of form fields; must include the site_* content
        keys and the hidden_node_id / hidden_node_version markers.
    :return: the revised existing content on update, or the associated
        content object for a newly created site.
    """
    content_type = content_type_check_and_load("Site Content Type")
    if data["hidden_node_id"] and data["hidden_node_version"]:
        # Update path: both hidden markers present means the form was
        # opened on an existing node.
        existing_content = load_and_revise(data["hidden_node_id"], SiteRevision)
        update_object_hash_and_save(existing_content, data)
        return existing_content
    else:
        # Create path: register a fresh node and build the Site row.
        node = _register_node()
        site = Site(
            _version=1,
            _node_id=node._id,
            _lock="",
            site_name=data["site_name"],
            environment_name=data["environment_name"],
            local_build_dir=data["local_build_dir"],
            static_files_dir=data["static_files_dir"],
            hosting_type=data["hosting_type"],
            index_content=data["index_content"],
            menu_content=data["menu_content"],
            groups_content=data["groups_content"],
        )
        # First commit: needed so the DB assigns the row id, which is an
        # input to the hash below. Do NOT reorder.
        db.session.add(site)
        db.session.commit()
        db.session.refresh(site)
        # Second pass: compute the hashes (now that the id exists) and
        # persist them.
        site._hash = _hash_table(site)
        site._hash_chain = _hash_table(site, chain=True)
        db.session.add(site)
        db.session.commit()
        db.session.refresh(site)
        content_obj = _associate_node(node, site, content_type)
        return content_obj
def sites_create(request):
    """Render and process the site-creation form.

    POST: validate SiteForm; on success save a new Site with a generated
    screenshot filename, flash a success message, and redirect to site
    management. An invalid POST re-renders the bound form with errors.
    GET: render an unbound form.
    """
    if request.method == "POST":
        form = SiteForm(request.user, request.POST)
        if form.is_valid():
            url = form.cleaned_data["url"]
            category = form.cleaned_data["category"]
            deadline = form.cleaned_data["deadline"]
            # Timestamp makes the per-user screenshot filename unique.
            now = str(datetime.today().strftime("%a%b%d%H:%M:%S%Y"))
            image_name = "".join(
                [request.user.username, "_", now, "_image.png"])
            Site(
                category=category,
                deadline=deadline,
                image_path=image_name,
                url=url,
                user=request.user,
            ).save()
            # Bug fix: message typo "sucessfully" -> "successfully".
            messages.success(
                request, "Entry successfully saved - Saving a screen shot")
            return redirect("core:site_management")
    else:
        form = SiteForm(request.user)
    return render(request, "core/site_create.html", {"form": form})
def run(self):
    # Import remote PLC sites into the local database (Python 2 code:
    # note the statement-form prints).
    # Index existing local sites by login_base so remote sites can be
    # matched against them.
    db_sites = Site.objects.all()
    for db_site in db_sites:
        self.local_sites[db_site.login_base] = db_site
    print "%s local sites" % len(db_sites)
    # NOTE(review): peer_id None presumably restricts the query to
    # locally-owned (non-peered) sites — confirm against the PLCAPI docs.
    sites = self.api.GetSites({'peer_id': None})
    print "%s remote sites" % len(sites)
    count = 0
    for site in sites:
        self.remote_sites[site['site_id']] = site
        # Only create sites we don't already have locally.
        if site['login_base'] not in self.local_sites:
            new_site = Site(name=site['name'],
                            login_base=site['login_base'],
                            site_url=site['url'],
                            enabled=site['enabled'],
                            longitude=site['longitude'],
                            latitude=site['latitude'],
                            is_public=site['is_public'],
                            abbreviated_name=site['abbreviated_name'])
            new_site.save()
            count += 1
            # Register the new site so later duplicates are skipped.
            self.local_sites[new_site.login_base] = new_site
    print "imported %s sites" % count
from xosconfig import Config from multistructlog import create_logger log = create_logger(Config().get("logging")) class XOSDefaultSecurityContext(object): grant_access = True write_access = True read_access = True xos_anonymous_site = Site( name="XOS Anonymous Site", enabled=True, hosts_nodes=False, hosts_users=True, login_base="xos", abbreviated_name="xos-anonymous", ) xos_anonymous_user = User( username="******", email="*****@*****.**", is_admin=False, site=xos_anonymous_site, ) class XOSAPIHelperMixin(XOSAuthHelperMixin): """ This helper contains several functions used to implement the autogenerated core API. It translates between the gRPC representation of objects and the django representation
def get_network_links(self):
    """Return a list of fixture network/service objects for testing.

    Builds point-to-point, multipoint, and edge-to-multipoint
    connectivity objects plus the VnodGlobal test fixtures (bandwidth
    profile, sites, remote ports, spokes, and the global service).
    """
    device = NetworkDevice()
    device.id = self.networkdevice.id

    def endpoint(port_name):
        # Qualify an "of:..." port name with the device id.
        return device.id + "." + port_name

    objs = []

    # --- Point-to-point (edge-to-edge) connectivity -------------------
    e2e = NetworkEdgeToEdgePointConnection()
    e2e.uni1_createbuffer = endpoint("of:000000001/1")
    e2e.uni2_createbuffer = endpoint("of:000000001/2")
    e2e.type = 'direct'
    e2e.operstate = 'active'
    e2e.adminstate = 'enabled'
    e2e.sid = 'EdgeToEdgePointConnectivity_1'
    objs.append(e2e)

    # --- Multipoint-to-multipoint connectivity ------------------------
    mp2mp = NetworkMultipointToMultipointConnection()
    mp2mp.operstate = 'active'
    mp2mp.adminstate = 'enabled'
    mp2mp.type = 'ethernet'
    mp2mp.sid = 'MultipointToMultipointConnectivity_1'
    # Endpoint list serialized as JSON for post-save behaviour.
    mp2mp_eps = [endpoint("of:000000001/3"),
                 endpoint("of:000000001/4"),
                 endpoint("of:000000001/5")]
    mp2mp.eps_createbuffer = json.dumps({'eps': mp2mp_eps, 'foo': 0, 'bar': 0})
    objs.append(mp2mp)

    # --- Edge-to-multipoint connectivity ------------------------------
    e2mp = NetworkEdgeToMultipointConnection()
    e2mp.operstate = 'active'
    e2mp.adminstate = 'enabled'
    e2mp.type = 'ethernet'
    e2mp.sid = 'EdgeToMultipointConnectivity_1'
    e2mp.root_createbuffer = endpoint("of:000000001/7")
    # Endpoint list serialized as JSON for post-save behaviour.
    e2mp_eps = [endpoint("of:000000001/6"),
                endpoint("of:000000001/8")]
    e2mp.eps_createbuffer = json.dumps({'eps': e2mp_eps, 'foo': 0, 'bar': 0})
    objs.append(e2mp)

    # --- VnodGlobal test fixtures -------------------------------------
    # Bandwidth profile.
    bwprofile = BandwidthProfile()
    bwprofile.bwpcfgcbs = 0
    bwprofile.bwpcfgcir = 0
    bwprofile.bwpcfgebs = 0
    bwprofile.bwpcfgeir = 0
    bwprofile.name = 'TestBWP'
    objs.append(bwprofile)

    # Two sites.
    site1 = Site()
    site1.name = 'CORDPod1'
    site1.login_base = 'CordPod1'
    site1.site_url = 'http://1.2.3.4:8080/VnodLocalApi'
    objs.append(site1)

    site2 = Site()
    site2.name = 'CORDPod2'
    site2.login_base = 'CordPod2'
    site2.site_url = 'http://10.11.12.13:8080/VnodLocalApi'
    objs.append(site2)

    # Two remote ports, one per site.
    remoteport1 = RemotePort()
    remoteport1.name = "CORDPOD1:Port1"
    remoteport1.sitename = 'CordPod1'
    remoteport1.edgeportname = endpoint("of:000000001/1")
    objs.append(remoteport1)

    remoteport2 = RemotePort()
    remoteport2.name = "CORDPOD2:Port1"
    remoteport2.sitename = 'CordPod2'
    remoteport2.edgeportname = endpoint("of:000000001/2")
    objs.append(remoteport2)

    # One service spoke per site.
    spoke1 = ServiceSpoke()
    spoke1.name = 'TestSpoke1'
    spoke1.remoteportname = "CORDPOD1:Port1"
    spoke1.remotevnodid = 'CORDPod1:VnodLocal:1'
    spoke1.operstate = 'inactive'
    spoke1.adminstate = 'enabled'
    spoke1.sitename = 'CordPod1'
    objs.append(spoke1)

    spoke2 = ServiceSpoke()
    spoke2.name = 'TestSpoke2'
    spoke2.remoteportname = "CORDPOD2:Port1"
    spoke2.remotevnodid = 'CORDPod2:VnodLocal:1'
    spoke2.operstate = 'active'
    spoke2.adminstate = 'enabled'
    spoke2.sitename = 'CordPod2'
    objs.append(spoke2)

    # One VnodGlobal service tying the spokes together.
    vnodglobal = VnodGlobalService()
    vnodglobal.name = 'VnodGlobalPtToPtTest1'
    vnodglobal.type = 'eline'
    vnodglobal.vlanid = '100'
    vnodglobal.operstate = 'active'
    vnodglobal.adminstate = 'enabled'
    vnodglobal.servicehandle = 'testhandle1'
    vnodglobal.pointtopointsid = 'onos_eline_id'
    vnodglobal.bwpname = 'TestBWP'
    # Spoke list serialized as JSON for post-save behaviour.
    vnodglobal.spokes_createbuffer = json.dumps(
        {'spokes': ['TestSpoke1', 'TestSpoke2']})
    objs.append(vnodglobal)

    return objs
'url_to_crawl': 'http://mlweekly.com/', 'domain': '', 'slug': 'machine-learning-weekly', }, { 'name': 'MIT News', 'main_url': 'http://news.mit.edu/topic/machine-learning/', 'url_to_crawl': 'http://news.mit.edu/topic/machine-learning/', 'domain': 'http://news.mit.edu', 'slug': 'mit-news', }, ) REDDIT = sites_info[0]['name'] ML_MASTERY = sites_info[1]['name'] ML_WEEKLY = sites_info[2]['name'] MIT_NEWS = sites_info[3]['name'] PERIODS = ['all', 'today', 'yesterday', 'week', 'month'] ALL, TODAY, YESTERDAY, WEEK, MONTH = PERIODS if __name__ == '__main__': django_setup() from core.models import Site for site in sites_info: try: Site(**site).save() except IntegrityError: pass