def job_data(self, attributes):
    """Render the <job> XML element (query expression plus match_attrs)
    for the given job attribute names.

    attributes -- list of job ClassAd attribute names
    Returns "" when no attributes are supplied.
    """
    data = ""
    if attributes:  # truthiness instead of len(...) > 0
        # "=!=" is the ClassAd meta-not-equal: true only when the attribute
        # is actually defined in the job ad.
        attr_query_arr = ["(%s=!=UNDEFINED)" % attr for attr in attributes]
        data = data + """\
%(indent4)s<job query_expr=%(expr)s>
%(indent5)s<match_attrs>
""" % {"indent4": common.indent(4),
       "indent5": common.indent(5),
       # str.join replaces the deprecated string.join (Python 3 compatible)
       "expr": xmlFormat.xml_quoteattr(" && ".join(attr_query_arr)), }
        for attr in attributes:
            data = data + """
%(indent6)s<match_attr name="%(attr)s" type="string"/>
""" % {"indent6": common.indent(6),
       "attr": attr, }
        data = data + """\
%(indent5)s</match_attrs>
%(indent4)s</job>
""" % {"indent4": common.indent(4),
       "indent5": common.indent(5), }
    return data
def config_collectors_data(self):
    """Render the <collectors> XML section for the user collector,
    adding a port-range secondary entry when secondary collectors exist.
    """
    data = """
%(indent1)s<collectors>
%(indent2)s<collector node="%(usercollector_node)s:%(usercollector_port)s"
%(indent2)s DN="%(usercollector_gsi_dn)s"
%(indent2)s secondary="False"/>
""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     "usercollector_node": self.usercollector.hostname(),
     "usercollector_port": self.usercollector.collector_port(),
     "usercollector_gsi_dn": self.usercollector.x509_gsi_dn(),
     }

    #--- secondary collectors --
    # "!=" replaces the Python-2-only "<>" operator (removed in Python 3)
    if self.usercollector.secondary_collectors() != 0:
        first_port = self.usercollector.secondary_collector_ports()[0]
        last_port = self.usercollector.secondary_collector_ports()[int(self.usercollector.secondary_collectors()) - 1]
        # single entry covering the whole secondary port range
        port_range = "%s-%s" % (first_port, last_port)
        data += """
%(indent2)s<collector node="%(usercollector_node)s:%(usercollector_port)s"
%(indent2)s DN="%(usercollector_gsi_dn)s"
%(indent2)s secondary="True"/>
%(indent1)s""" % \
        {"indent1": common.indent(1),
         "indent2": common.indent(2),
         "usercollector_node": self.usercollector.hostname(),
         "usercollector_port": port_range,
         "usercollector_gsi_dn": self.usercollector.x509_gsi_dn(),
         }
    data += """</collectors>"""
    return data
def config_security_data(self):
    """Render the frontend <security> section: service identity, classad
    proxy, proxy DN, and one <proxy> element per glidein proxy file.
    """
    data = """
%(indent1)s<security security_name="%(service_name)s"
%(indent1)s proxy_selection_plugin="ProxyAll"
%(indent1)s classad_proxy="%(x509_proxy)s"
%(indent1)s proxy_DN="%(x509_gsi_dn)s">
%(indent2)s<proxies>""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     "service_name": self.service_name(),
     "x509_proxy": self.x509_proxy(),
     "x509_gsi_dn": self.x509_gsi_dn(),
     }
    proxies = self.glidein_proxy_files()
    # glidein_proxy_files() returns a space-separated string of file names;
    # note an empty string would still yield one (empty) <proxy> element.
    for proxy in proxies.split(" "):
        data = data + """
%(indent3)s<proxy security_class="frontend" absfname="%(proxy)s"/>""" % \
        {"indent3": common.indent(3),
         "proxy": proxy}
    data = data + """
%(indent2)s</proxies>
%(indent1)s</security>""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     }
    return data
def config_default_attr_data(self):
    """Render the default <attrs> section: USE_CCB plus the glexec /
    match-auth / condor-version attributes.
    """
    data = """
%s<attrs>""" % (common.indent(1))
    # USE_CCB is enabled by default; only an explicit "n" disables it.
    # (The two original branches were identical except for this value.)
    use_ccb_value = "False" if self.glidein.use_ccb() == "n" else "True"
    data = data + """
%s<attr name="USE_CCB" value="%s" const="True" type="string" glidein_publish="True" publish="True" job_publish="False" parameter="True"/>""" % (common.indent(2), use_ccb_value)

    # -- glexec --
    data = data + """
%(indent2)s<attr name="GLEXEC_JOB" value="True" const="True" type="string" glidein_publish="False" publish="True" job_publish="False" parameter="True"/>
%(indent2)s<attr name="USE_MATCH_AUTH" value="True" const="False" type="string" glidein_publish="False" publish="True" job_publish="False" parameter="True"/>
%(indent2)s<attr name="CONDOR_VERSION" value="default" const="False" type="string" glidein_publish="False" publish="True" job_publish="False" parameter="True"/>
%(indent1)s</attrs>
""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     }
    return data
def config_files_data(self):
    """Render an (intentionally empty) <files> section."""
    # Removed the unused "indent2" substitution key: the template only
    # references %(indent1)s.
    return """\
%(indent1)s<files>
%(indent1)s</files>
""" % \
    {"indent1": common.indent(1),
     }
def config_condor_data(self):
    """Render the <condor_tarballs> section pointing at the WMS condor
    installation directory.
    """
    # Keep the template and its substitutions separate for readability.
    template = """
%(indent1)s<condor_tarballs>
%(indent2)s<condor_tarball arch="default" os="default" base_dir="%(condor_location)s"/>
%(indent1)s</condor_tarballs>
"""
    values = {
        "indent1": common.indent(1),
        "indent2": common.indent(2),
        "condor_location": self.wms.condor_location(),
    }
    return template % values
def config_groups_data(self, match_criteria):
    """Render the <groups> wrapper around the supplied match-criteria XML."""
    template = """\
%(indent1)s<groups>
%(indent2)s%(match_criteria)s
%(indent1)s</groups>
"""
    values = {
        "indent1": common.indent(1),
        "indent2": common.indent(2),
        "match_criteria": match_criteria,
    }
    return template % values
def show(self):
    """Pretty-print this let-expression: 'let', the indented declarations
    (blank-line separated, except after type declarations and the last
    entry), then ' in' and the indented body.
    """
    lines = ['let']
    last = len(self.declarations) - 1
    # enumerate replaces the original hand-maintained counter
    for i, decl in enumerate(self.declarations):
        lines.append(common.indent(decl.show(), 4))
        # blank separator between declarations, but not after the last one
        # and not after type declarations
        if i < last and not decl.is_type_declaration():
            lines.append('')
    lines.append(' in')
    lines.append(common.indent(self.body.show(), 4))
    return '\n'.join(lines)
def config_monitor_data(self):
    """Render the <monitor> section with the monitoring web directories.

    (Removed a dead local `indent = common.indent(1)` that was assigned
    and never read.)
    """
    return """
%(indent1)s<monitor base_dir="%(web_location)s/monitor"
%(indent1)s javascriptRRD_dir="%(javascriptrrd)s"
%(indent1)s flot_dir="%(flot)s"
%(indent1)s jquery_dir="%(jquery)s"/>""" % \
    {"indent1": common.indent(1),
     "web_location": self.glidein.web_location(),
     "javascriptrrd": self.glidein.javascriptrrd_dir,
     "jquery": self.glidein.jquery_dir,
     "flot": self.glidein.flot_dir,
     }
def get_match_criteria(self): """ Determine the job constraints/matching criteria for submitting jobs.""" #-- factory attributes ---- print """ What glidein/factory attributres are you using in the match expression? I have computed my best estimate for your match string, please verify and correct if needed. """ default_factory_attributes = string.join(self.extract_factory_attrs(),',') factory_attributes = raw_input("Factory attributes: [%s] "%default_factory_attributes) if factory_attributes == "": factory_attributes = default_factory_attributes if factory_attributes == "": factory_attributes = [] else: factory_attributes = string.split(factory_attributes,',') #--- job_attributes -- print """ What job attributes are you using in the match expression? I have computed my best estimate for your match string, please verify and correct if needed. """ default_job_attributes = string.join(self.extract_job_attrs(),',') job_attributes = raw_input("Job attributes: [%s] " % default_job_attributes) if job_attributes == "": job_attributes = default_job_attributes if job_attributes == "": job_attributes = [] else: job_attributes = string.split(job_attributes,',') #--- create xml ---- data = """ %(indent2)s<group name="%(group_name)s" enabled="True"> %(indent3)s<match match_expr=%(match_string)s> %(factory_attributes)s %(job_attributes)s %(indent3)s</match> %(indent2)s</group> """ % \ { "indent2" : common.indent(2), "indent3" : common.indent(3), "indent4" : common.indent(4), "group_name" : self.group_name(), "match_string" : xmlFormat.xml_quoteattr(self.match_string()), "factory_attributes" : self.factory_data(factory_attributes), "job_attributes" : self.job_data(job_attributes), } return data
def config_security_data(self):
    """Render the factory <security> section: which proxy types are
    allowed, plus one <frontend> element (with its security classes)
    per frontend user known to the WMS collector.
    """
    if self.use_vofrontend_proxy() == "y":
        # disable factory proxy
        allow_proxy = "frontend"
    else:
        # allow both factory proxy and VO proxy
        allow_proxy = "factory,frontend"

    data = """
%(indent1)s<security allow_proxy="%(allow_proxy)s" key_length="2048" pub_key="RSA" >
%(indent2)s<frontends>""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     "allow_proxy": allow_proxy,
     }
    frontend_users_dict = self.wms.frontend_users()
    for frontend in frontend_users_dict.keys():
        # NOTE(review): the template emits username="******" while the
        # mapping still supplies "frontend_user" -- the username value looks
        # masked/redacted in this copy; confirm against the original source.
        data = data + """
%(indent3)s<frontend name="%(frontend)s" identity="%(frontend)s@%(hostname)s">
%(indent4)s<security_classes>
%(indent5)s<security_class name="frontend" username="******"/>
""" % \
        {"indent3": common.indent(3),
         "indent4": common.indent(4),
         "indent5": common.indent(5),
         "frontend": frontend,
         "hostname": self.hostname(),
         "frontend_user": frontend_users_dict[frontend],
         }
        if self.use_vofrontend_proxy() == "n":
            # also grant the factory user its own security class
            # NOTE(review): "factory_user" is likewise unused by the masked
            # template -- same redaction caveat as above.
            data = data + """\
%(indent5)s<security_class name="factory" username="******"/>
""" % \
            {"indent5": common.indent(5),
             "factory_user": self.username(),
             }
        data = data + """
%(indent4)s</security_classes>
%(indent3)s</frontend>""" % \
        {"indent3": common.indent(3),
         "indent4": common.indent(4),
         }
    data = data + """
%(indent2)s</frontends>
%(indent1)s</security>""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     }
    return data
def config_entries_data(self):
    """Render the <entries> section: one <entry> element per configured
    entry, emitted in sorted entry-name order.
    """
    data = """\
%(indent1)s<entries>""" % {"indent1": common.indent(1), }
    # sorted() works on both Python 2 and 3; the original
    # dict.keys() + list.sort() pattern breaks on Python 3 views.
    for entry_name in sorted(self.config_entries_list.keys()):
        entry_el = self.config_entries_list[entry_name]
        if entry_el['rsl'] != "":
            # quoteattr adds the surrounding quotes for the rsl value
            rsl_str = 'rsl=%s' % xml.sax.saxutils.quoteattr(entry_el['rsl'])
        else:
            rsl_str = ""
        data = data + """
%(indent2)s<!-- %(entry_name)s -->
%(indent2)s<entry name="%(entry_name)s" gridtype="%(gridtype)s" gatekeeper="%(gatekeeper)s" %(rsl)s work_dir="%(workdir)s">
%(indent3)s<infosys_refs>
%(infosys_ref)s
%(indent3)s</infosys_refs>
%(indent3)s<attrs>
%(indent4)s<attr name="GLIDEIN_Site" value="%(site_name)s" const="True" type="string" glidein_publish="True" publish="True" job_publish="True" parameter="True"/>
%(indent4)s<attr name="CONDOR_OS" value="default" const="False" type="string" glidein_publish="False" publish="True" job_publish="False" parameter="True"/>
%(indent4)s<attr name="CONDOR_ARCH" value="default" const="False" type="string" glidein_publish="False" publish="True" job_publish="False" parameter="True"/>
%(indent4)s<attr name="GLEXEC_BIN" value="%(glexec_path)s" const="True" type="string" glidein_publish="False" publish="True" job_publish="False" parameter="True"/>
%(ccb_attr)s
%(indent3)s</attrs>
%(indent3)s<files>
%(indent3)s</files>
%(indent2)s</entry>
""" % {"indent2": common.indent(2),
       "indent3": common.indent(3),
       "indent4": common.indent(4),
       "entry_name": entry_name,
       "rsl": rsl_str,
       "gridtype": entry_el['gridtype'],
       "gatekeeper": entry_el['gatekeeper'],
       "workdir": entry_el['work_dir'],
       "infosys_ref": self.entry_infosys_ref_data(entry_el['is_ids']),
       "ccb_attr": self.entry_ccb_attrs(),
       "site_name": entry_el['site_name'],
       "glexec_path": entry_el['glexec_path'],
       }
    #--- end of entry element --
    data = data + """%(indent1)s</entries>
""" % \
    {"indent1": common.indent(1),
     }
    return data
def __str__(self) -> str:
    """Return the S-expression representation of this variant."""
    header = '(variant {} {}\n {}\n {}\n'.format(
        self.uuid, self.norm, self.name, self.description)
    gate_lines = indent(1, str(self.gate).splitlines())
    return header + '\n'.join(gate_lines) + '\n)'
def show(self):
    """Render 'lhs = rhs' plus an indented ' where' block when this
    declaration has where-bindings.
    """
    lines = [self.lhs.show() + ' = ' + self.rhs.show()]
    if self.where:  # truthiness instead of len(...) > 0
        lines.append(' where')
        for decl in self.where:
            lines.append(common.indent(decl.show(), 4))
    return '\n'.join(lines)
def config_work_data(self):
    """Render the <work> section: install base dir and base log dir."""
    values = {
        "indent1": common.indent(1),
        "install_location": self.glidein.install_location(),
        "logs_dir": self.logs_dir(),
    }
    template = """
%(indent1)s<work base_dir="%(install_location)s"
%(indent1)s base_log_dir="%(logs_dir)s"/>"""
    return template % values
def config_stage_data(self):
    """Render the <stage> section: staging web URL and on-disk directory."""
    values = {
        "indent1": common.indent(1),
        "web_url": self.glidein.web_url(),
        "web_location": self.glidein.web_location(),
    }
    template = """
%(indent1)s<stage web_base_url="%(web_url)s/stage"
%(indent1)s base_dir="%(web_location)s/stage"/>"""
    return template % values
def config_attrs_data(self):
    """Render the frontend <attrs> section: glexec use, grid-env exposure,
    match authentication and client rank.

    (Removed the unused "indent3" and "entry_start" substitution keys --
    the template never references them.)
    """
    return """
%(indent1)s<attrs>
%(indent2)s<attr name="GLIDEIN_Glexec_Use" value="%(glexec_use)s" glidein_publish="True" job_publish="True" parameter="False" type="string"/>
%(indent2)s<attr name="GLIDEIN_Expose_Grid_Env" value="%(expose_grid_env)s" glidein_publish="True" job_publish="True" parameter="False" type="string"/>
%(indent2)s<attr name="USE_MATCH_AUTH" value="True" glidein_publish="False" job_publish="False" parameter="True" type="string"/>
%(indent2)s<attr name="GLIDECLIENT_Rank" value="%(entry_rank)s" glidein_publish="False" job_publish="False" parameter="True" type="string"/>
%(indent1)s</attrs>
""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     "glexec_use": self.glexec_use(),
     "expose_grid_env": self.expose_grid_env(),
     "entry_rank": "1",
     }
def config_stage_data(self):
    """Render the <stage> section: staging URL (including the web dir
    name), symlink usage and the on-disk staging directory.
    """
    values = {
        "indent1": common.indent(1),
        "web_url": self.glidein.web_url(),
        "web_location": self.glidein.web_location(),
        "web_dir": os.path.basename(self.glidein.web_location()),
    }
    template = """
%(indent1)s<stage web_base_url="%(web_url)s/%(web_dir)s/stage"
%(indent1)s use_symlink="True"
%(indent1)s base_dir="%(web_location)s/stage"/>"""
    return template % values
def indent_entity(entity: Any) -> str:
    """indent an entity and add trailing newline

    >>> indent_entity('(foo "1")')
    ' (foo "1")\\n'
    >>> indent_entity('(bar "2"\\n (baz "3")\\n)')
    ' (bar "2"\\n (baz "3")\\n )\\n'
    """
    indented_lines = indent(1, str(entity).splitlines())
    return '\n'.join(indented_lines) + '\n'
def entry_infosys_ref_data(self, is_els):
    """Render one <infosys_ref/> line per information-system element.

    is_els -- iterable of dicts with 'type', 'server' and 'name' keys.
    """
    # Accumulate pieces and join once instead of repeated string concat.
    pieces = []
    for is_el in is_els:
        pieces.append("""%(indent4)s<infosys_ref type="%(type)s" server="%(server)s" ref="%(name)s"/>
""" % \
        {"indent4": common.indent(4),
         "type": is_el['type'],
         "server": is_el['server'],
         "name": is_el['name'],
         })
    return "".join(pieces)
def config_submit_data(self):
    """Render the <submit> section: install, factory-log and per-client
    log/proxy directories.
    """
    values = {
        "indent1": common.indent(1),
        "install_location": self.install_location(),
        "factory_logs": self.logs_dir(),
        "client_log_dir": self.client_log_dir(),
        "client_proxy_dir": self.client_proxy_dir(),
    }
    template = """
%(indent1)s<submit base_dir="%(install_location)s"
%(indent1)s base_log_dir="%(factory_logs)s"
%(indent1)s base_client_log_dir="%(client_log_dir)s"
%(indent1)s base_client_proxies_dir="%(client_proxy_dir)s"/>
"""
    return template % values
def config_security_data(self):
    """Render the WMS-collector <security> section: RSA key parameters and
    one <frontend> element (with a frontend security class) per frontend
    user.
    """
    data = """
%(indent1)s<security key_length="2048" pub_key="RSA" >
%(indent2)s<frontends>""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     }
    frontend_users_dict = self.wms.frontend_users()
    for frontend in frontend_users_dict.keys():
        # NOTE(review): the template emits username="******" while the
        # mapping still supplies "frontend_user" -- the username value looks
        # masked/redacted in this copy; confirm against the original source.
        data = data + """
%(indent3)s<frontend name="%(frontend)s" identity="%(frontend)s@%(hostname)s">
%(indent4)s<security_classes>
%(indent5)s<security_class name="frontend" username="******"/>
""" % \
        {"indent3": common.indent(3),
         "indent4": common.indent(4),
         "indent5": common.indent(5),
         "frontend": frontend,
         "hostname": self.hostname(),
         "frontend_user": frontend_users_dict[frontend],
         }
        data = data + """
%(indent4)s</security_classes>
%(indent3)s</frontend>""" % \
        {"indent3": common.indent(3),
         "indent4": common.indent(4),
         }
    data = data + """
%(indent2)s</frontends>
%(indent1)s</security>""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     }
    return data
def config_match_data(self, schedds):
    """Render the <match> section: the factory collector definition, the
    global job query constraint, and one <schedd> element per submit
    schedd in `schedds`.
    """
    data = """
%(indent1)s<match>
%(indent2)s<factory>
%(indent3)s<collectors>
%(indent4)s<collector node="%(wms_node)s:%(wms_collector_port)s" DN="%(wms_gsi_gn)s" factory_identity="%(factory_username)s@%(wms_node)s" my_identity="%(frontend_identity)s@%(wms_node)s" comment="Define factory collectors globally for simplicity"/>
%(indent3)s</collectors>
%(indent2)s</factory>
%(indent2)s<job query_expr=%(job_constraints)s comment="Define job constraint and schedds globally for simplicity">
%(indent3)s<schedds>""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     "indent3": common.indent(3),
     "indent4": common.indent(4),
     "wms_node": self.wms.hostname(),
     "wms_collector_port": self.wms.collector_port(),
     "wms_gsi_gn": self.wms.x509_gsi_dn(),
     "factory_username": self.factory.username(),
     "frontend_identity": self.service_name(),
     # quoteattr supplies the surrounding quotes for the query expression
     "job_constraints": xmlFormat.xml_quoteattr(self.userjob_constraints()),
     }
    for schedd in schedds:
        data = data + """
%(indent4)s<schedd fullname="%(schedd)s" DN="%(submit_gsi_dn)s"/>""" % \
        {"indent4": common.indent(4),
         "schedd": schedd,
         "submit_gsi_dn": self.submit.x509_gsi_dn()}
    data = data + """
%(indent3)s</schedds>
%(indent2)s</job>
%(indent1)s</match>
""" % \
    {"indent1": common.indent(1),
     "indent2": common.indent(2),
     "indent3": common.indent(3),
     }
    return data
def write(self, fields, queries, cpp=None, cpp_header=None, cpp_class="DataStructure", cpp_record_class="Record", cpp_abstract_record=False, cpp_extra=None, cpp_namespace=None, **kwargs):
    """Emit the generated C++ data structure as a single header.

    Writes an include-guarded header containing: helper container templates
    (mystk, myarr), the record type (or client-implemented accessors when
    cpp_abstract_record is True), the data-structure class declaration, and
    inline definitions for the constructor, size/add/remove/update and one
    iterator class + query method per entry in `queries`.

    fields  -- mapping of record field name -> C++ type string
    queries -- query objects; each q.impl supplies the codegen hooks
               (gen_insert, gen_remove, gen_query, gen_has_next, ...)
    cpp / cpp_header -- output file names (stdout when None)
    NOTE(review): `writer` is rebound to `header_writer` before the CODE
    section, so everything ends up in the header file and the `cpp`
    outfile receives nothing -- presumably intentional (header-only
    output); confirm.
    """
    self.cpp_record_class = cpp_record_class
    self.cpp_abstract_record = cpp_abstract_record
    self.fields = fields
    with open_maybe_stdout(cpp) as outfile:
        with open_maybe_stdout(cpp_header) as header_outfile:
            writer = outfile.write
            header_writer = header_outfile.write

            # ---------------------------------------------------------------------
            # HEADER
            # Unique include guard per generation run.
            guard = "HEADER_{}".format(fresh_name())
            header_writer("#ifndef {}\n".format(guard))
            header_writer("#define {} 1\n".format(guard))
            header_writer("\n")

            if cpp_extra:
                header_writer("{}\n".format(cpp_extra))

            header_writer("#include <cassert>\n")
            header_writer("#include <ctgmath>\n")
            # header_writer("#include <vector>\n")
            header_writer("#include <unordered_map>\n")
            header_writer("#include <map>\n")
            header_writer("#include <functional>\n")
            header_writer("#include <algorithm>\n")
            if self.with_qt:
                header_writer("#include <QHash>\n")
            # Helper containers emitted verbatim into the generated header:
            # mystk (growable stack over a class-wide static backing array)
            # and myarr (owning fixed-length array with value semantics).
            header_writer("""
#include <cstdint>
template <class T>
class mystk {
 int32_t _end;
 static int32_t _cap;
 static T* _data;
public:
 mystk() : _end(-1) { }
 void reserve(size_t n) { }
 bool empty() { return _end < 0; }
 T& back() { return _data[_end]; }
 void push_back(const T& x) {
 ++_end;
 if (_end >= _cap) {
 _cap *= 2;
 T* newdata = new T[_cap];
 std::copy(_data, _data + _end, newdata);
 delete[] _data;
 _data = newdata;
 }
 // printf("inserting %p @ %d\\n", x, (int)_end);
 _data[_end] = x;
 }
 void pop_back() { --_end; }
};
template<class T> int32_t mystk<T>::_cap = 10;
template<class T> T* mystk<T>::_data = new T[10];
template <class T>
class myarr {
 T* data;
 int length;
public:
 myarr() : data(nullptr), length(0) { }
 myarr(int n) : data(new T[n]), length(n) { }
 myarr(const myarr& other) : data(new T[other.length]), length(other.length) {
 std::copy(other.data, other.data + other.length, data);
 }
 myarr(myarr&& other) : data(other.data), length(other.length) {
 other.data = nullptr;
 }
 myarr& operator=(const myarr& other) {
 if (this != &other) {
 length = other.length;
 data = new T[other.length];
 std::copy(other.data, other.data + other.length, data);
 }
 return *this;
 }
 myarr& operator=(myarr&& other) {
 if (this != &other) {
 length = other.length;
 std::swap(data, other.data);
 }
 return *this;
 }
 ~myarr() {
 if (data != nullptr) delete[] data;
 }
 T& operator[](int n) { return data[n]; }
 const T& operator[](int n) const { return data[n]; }
 int size() const { return length; }
 T* begin() { return data; }
 T* end() { return data + length; }
};
template <class T> bool operator==(const myarr<T>& lhs, const myarr<T>& rhs) {
 if (lhs.size() != rhs.size()) return false;
 for (int i = 0; i < lhs.size(); ++i) {
 if (lhs[i] != rhs[i]) return false;
 }
 return true;
}
template <class T> bool operator<(const myarr<T>& lhs, const myarr<T>& rhs) {
 if (lhs.size() < rhs.size()) return true;
 if (lhs.size() > rhs.size()) return false;
 for (int i = 0; i < lhs.size(); ++i) {
 if (lhs[i] < rhs[i]) return true;
 if (lhs[i] > rhs[i]) return false;
 }
 return false;
}
template <class T> bool operator!=(const myarr<T>& lhs, const myarr<T>& rhs) { return !(lhs == rhs); }
template <class T> bool operator>=(const myarr<T>& lhs, const myarr<T>& rhs) { return !(lhs < rhs); }
template <class T> bool operator>(const myarr<T>& lhs, const myarr<T>& rhs) { return (lhs != rhs) && (lhs >= rhs); }
template <class T> bool operator<=(const myarr<T>& lhs, const myarr<T>& rhs) { return !(lhs > rhs); }
""")
            header_writer("\n")

            if cpp_namespace is not None:
                header_writer("namespace {} {{\n".format(cpp_namespace))

            # forward decls
            header_writer("class {};\n".format(cpp_record_class))
            header_writer("class {};\n".format(cpp_class))
            header_writer("\n")

            # auxiliary type definitions
            # Two passes: forward declarations first, then full definitions;
            # `seen` deduplicates types shared across queries.
            seen = set()
            for q in queries:
                for t in q.impl.auxtypes():
                    _gen_aux_type_fwd_decl(t, self, header_writer, seen)
            seen = set()
            for q in queries:
                for t in q.impl.auxtypes():
                    _gen_aux_type_header(t, self, header_writer, cpp_class, seen)

            # record type
            private_members = []
            for q in queries:
                private_members += list((f, ty.gen_type(self)) for f, ty in q.impl.private_members())
            self.private_members = private_members
            if cpp_abstract_record:
                # Client supplies the record type: emit only the PrivateData
                # struct and the accessor prototypes the client must define.
                header_writer("struct PrivateData {\n")
                for name, ty in private_members:
                    header_writer(" {} {};\n".format(ty, name))
                header_writer("};\n")
                for name, ty in list(fields.items()):
                    header_writer("inline {}& read_{}({}); /* MUST BE IMPLEMENTED BY CLIENT */\n".format(ty, name, self.record_type()))
                header_writer("inline PrivateData& read_private_data({}); /* MUST BE IMPLEMENTED BY CLIENT */\n".format(self.record_type()))
            else:
                _gen_record_type(cpp_record_class, list(fields.items()), private_members, header_writer)
            header_writer("\n")

            header_writer("class {} {{\n".format(cpp_class))
            header_writer("public:\n")

            # constructor
            header_writer(" inline {}();\n".format(cpp_class))

            # get current size
            header_writer(" inline size_t size() const;\n")

            # add routine
            header_writer(" inline void add({} x);\n".format(self.record_type()))

            # remove routine
            header_writer(" inline void remove({} x);\n".format(self.record_type()))

            # update routines: one per-field setter plus a whole-record update
            for f, ty in fields.items():
                header_writer(" inline void update{}({} x, {} val);\n".format(capitalize(f), self.record_type(), ty))
            header_writer(" inline void update({} x, {});\n".format(self.record_type(), ", ".join("{} {}".format(ty, f) for f, ty in fields.items())))

            # query routines
            for q in queries:
                it_name = "{}_iterator".format(q.name)
                vars_needed = [(v, ty) for v, ty in q.vars if q.impl.needs_var(v)]

                # iterator class
                header_writer(" class {} {{\n".format(it_name))
                header_writer(" friend class {};\n".format(cpp_class))
                header_writer(" public:\n")
                header_writer(" inline bool hasNext();\n")
                header_writer(" inline {}* next();\n".format(cpp_record_class))
                header_writer(" inline void remove();\n")
                header_writer(" private:\n")
                state = q.impl.state()
                header_writer(" {}* parent;\n".format(cpp_class))
                vars_needed = [(v, ty) for v, ty in q.vars if q.impl.needs_var(v)]
                for v, ty in vars_needed:
                    header_writer(" {} {};\n".format(ty, v))
                for f, ty in state:
                    header_writer(" {} {};\n".format(ty.gen_type(self), f))
                header_writer(" inline {}({}* parent{}{});\n".format(it_name, cpp_class, "".join(", {} {}".format(ty, v) for v, ty in vars_needed), "".join(", {} {}".format(ty.gen_type(self), f) for f, ty in state)))
                header_writer(" };\n")

                # query method
                header_writer(" inline {} {}({});\n".format(it_name, q.name, ", ".join("{} {}".format(ty, v) for v, ty in q.vars)))
                header_writer(" inline {} {}_1({});\n".format(self.record_type(), q.name, ", ".join("{} {}".format(ty, v) for v, ty in q.vars)))

            # debugging
            header_writer(" inline void checkRep();\n")

            # private members
            header_writer("private:\n")
            header_writer(" size_t my_size;\n")
            for q in queries:
                for f, ty in q.impl.fields():
                    header_writer(" {} {};\n".format(ty.gen_type(self), f))
            header_writer("};\n")

            if cpp_namespace is not None:
                header_writer("}\n")

            header_writer("\n")

            # ---------------------------------------------------------------------
            # CODE
            # Fully-qualified name when a namespace is requested.
            name = cpp_class if cpp_namespace is None else "{}::{}".format(cpp_namespace, cpp_class)

            # writer("#include \"DataStructure.hpp\"\n")
            writer = header_writer

            # constructor
            writer("{}::{}() : my_size(0) {{\n".format(name, cpp_class))
            for q in queries:
                writer(indent(" ", q.impl.construct(self, This())))
            writer("}\n")

            # size
            writer("size_t {}::size() const {{ return my_size; }}\n".format(name))

            # add routine
            writer("void {}::add({} x) {{\n".format(name, self.record_type()))
            writer(" ++my_size;\n")
            for q in queries:
                writer(indent(" ", q.impl.gen_insert(self, "x", This())))
            writer("}\n")

            # remove routine
            writer("void {}::remove({} x) {{\n".format(name, self.record_type()))
            writer(" --my_size;\n")
            for q in queries:
                writer(indent(" ", q.impl.gen_remove(self, "x", This())))
            writer("}\n")

            # update routines
            for f, ty in fields.items():
                writer("void {}::update{}({} x, {} val) {{\n".format(name, capitalize(f), self.record_type(), ty))
                # only touch the indexes when the value actually changes
                writer(" if ({} != val) {{\n".format(self.get_field("x", f)))
                for q in queries:
                    writer(indent(" ", q.impl.gen_update(self, fields, "x", {f: "val"}, This())))
                writer(" {} = val;\n".format(self.get_field("x", f)))
                writer(" }")
                writer("}\n")
            writer("void {}::update({} x, {}) {{\n".format(name, self.record_type(), ", ".join("{} {}".format(ty, f) for f, ty in fields.items())))
            for q in queries:
                writer(indent(" ", q.impl.gen_update(self, fields, "x", {f: f for f in fields}, This())))
            for f, ty in fields.items():
                writer(" {} = {};\n".format(self.get_field("x", f), f))
            writer("}\n")

            # query routines
            for q in queries:
                vars_needed = [(v, ty) for v, ty in q.vars if q.impl.needs_var(v)]
                state = q.impl.state()

                # query call: computes the iterator's initial state
                writer("{prefix}::{q}_iterator {prefix}::{q}({}) {{\n".format(", ".join("{} {}".format(ty, v) for v, ty in q.vars), prefix=name, q=q.name))
                proc, stateExps = q.impl.gen_query(self, q.vars, This())
                writer(indent(" ", proc))
                writer(" return {}_iterator(this{}{});\n".format(q.name, "".join(", {}".format(v) for v, ty in vars_needed), "".join(", {}".format(e) for e in stateExps)))
                writer(" }\n")

                # iterator constructor
                writer("{prefix}::{q}_iterator::{q}_iterator({}* _parent{}{}) :\n".format(cpp_class, "".join(", {} _{}".format(ty, v) for v, ty in vars_needed), "".join(", {} _{}".format(ty.gen_type(self), f) for f, ty in state), prefix=name, q=q.name))
                writer(" parent(_parent){}{}\n".format("".join(", {f}(_{f})".format(f=v) for v, ty in vars_needed), "".join(", {f}(_{f})".format(f=v) for v, ty in state)))
                writer("{ }\n")

                # hasNext
                writer("bool {prefix}::{q}_iterator::hasNext() {{\n".format(prefix=name, q=q.name))
                proc, ret = q.impl.gen_has_next(self, parent_structure=TupleInstance("parent"), iterator=This())
                writer(indent(" ", proc))
                writer(" return {};\n".format(ret))
                writer("}\n")

                # next
                writer("{} {prefix}::{q}_iterator::next() {{\n".format(self.record_type(), prefix=name, q=q.name))
                proc, ret = q.impl.gen_next(self, parent_structure=TupleInstance("parent"), iterator=This())
                writer(indent(" ", proc))
                writer(" return {};\n".format(ret))
                writer("}\n")

                # remove: delete the current record, then unhook it from the
                # indexes maintained for every other query
                writer("void {prefix}::{q}_iterator::remove() {{\n".format(prefix=name, q=q.name))
                writer(" --(parent->my_size);\n")
                proc, removed = q.impl.gen_remove_in_place(self, parent_structure=TupleInstance("parent"), iterator=This())
                writer(indent(" ", proc))
                for q2 in queries:
                    if q2 != q:
                        writer(indent(" ", q2.impl.gen_remove(self, removed, parent_structure=TupleInstance("parent"))))
                writer("}\n")

                # singular query call: first match or nullptr
                writer("{rt} {prefix}::{q}_1({}) {{\n".format(", ".join("{} {}".format(ty, v) for v, ty in q.vars), rt=self.record_type(), prefix=name, q=q.name))
                writer(" if (my_size == 0) { return nullptr; }\n")
                proc, result = q.impl.gen_query_one(self, q.vars, This())
                writer(indent(" ", proc))
                writer(" return {};\n".format(result))
                writer("}\n")

            writer("void {}::checkRep() {{\n".format(name))
            for q in queries:
                writer(indent(" ", q.impl.check_rep(self, This())))
            writer("}\n")

            header_writer("#endif\n")
def add_footprint_variant(
    key: str,
    name: str,
    density_level: str,
) -> None:
    """Append one footprint variant (pads, lead documentation polygons,
    silkscreen, documentation outline, courtyard and name/value labels)
    to the enclosing `lines` list.

    key -- unique suffix used to derive this footprint's UUIDs
    name -- human-readable footprint name
    density_level -- IPC density level key for pad-excess lookups

    NOTE(review): reads many closure variables from the enclosing scope
    (lines, pitch, pin_count, total_width, body_width, body_length,
    lead_width, lead_contact_length, line_width, silkscreen_offset,
    pkg_text_height, uuid_pads, uuid_leads1, uuid_leads2, ...) -- confirm
    against the enclosing generator function.
    """
    uuid_footprint = _uuid('footprint-{}'.format(key))
    uuid_silkscreen_top = _uuid('polygon-silkscreen-{}'.format(key))
    uuid_silkscreen_bot = _uuid('polygon-silkscreen2-{}'.format(key))
    uuid_outline = _uuid('polygon-outline-{}'.format(key))
    uuid_courtyard = _uuid('polygon-courtyard-{}'.format(key))
    uuid_text_name = _uuid('text-name-{}'.format(key))
    uuid_text_value = _uuid('text-value-{}'.format(key))

    # Max boundaries (pads or body)
    max_x = 0.0
    max_y = 0.0

    # Max boundaries (copper only)
    max_y_copper = 0.0

    lines.append(' (footprint {}'.format(uuid_footprint))
    lines.append(' (name "{}")'.format(name))
    lines.append(' (description "")')

    # Pad excess according to IPC density levels
    pad_heel = get_by_density(pitch, density_level, 'heel')
    pad_toe = get_by_density(pitch, density_level, 'toe')
    pad_side = get_by_density(pitch, density_level, 'side')

    # Pads
    pad_width = lead_width + pad_side
    pad_length = lead_contact_length + pad_heel + pad_toe
    pad_x_offset = total_width / 2 - lead_contact_length / 2 - pad_heel / 2 + pad_toe / 2
    for p in range(1, pin_count + 1):
        mid = pin_count // 2
        # pins 1..mid go down the left side, the rest mirror on the right
        if p <= mid:
            y = get_y(p, pin_count // 2, pitch, False)
            pxo = ff(-pad_x_offset)
        else:
            y = -get_y(p - mid, pin_count // 2, pitch, False)
            pxo = ff(pad_x_offset)
        pad_uuid = uuid_pads[p - 1]
        lines.append(' (pad {} (side top) (shape rect)'.format(pad_uuid))
        lines.append(' (position {} {}) (rotation 0.0) (size {} {}) (drill 0.0)'.format(
            pxo,
            ff(y),
            ff(pad_length),
            ff(pad_width),
        ))
        lines.append(' )')
        max_y_copper = max(max_y_copper, y + pad_width / 2)
    max_x = max(max_x, total_width / 2 + pad_toe)

    # Documentation: Leads
    lead_contact_x_offset = total_width / 2 - lead_contact_length  # this is the inner side of the contact area
    for p in range(1, pin_count + 1):
        mid = pin_count // 2
        if p <= mid:
            # left side
            y = get_y(p, pin_count // 2, pitch, False)
            lcxo_max = ff(-lead_contact_x_offset - lead_contact_length)
            lcxo_min = ff(-lead_contact_x_offset)
            body_side = ff(-body_width / 2)
        else:
            # right side
            y = -get_y(p - mid, pin_count // 2, pitch, False)
            lcxo_min = ff(lead_contact_x_offset)
            lcxo_max = ff(lead_contact_x_offset + lead_contact_length)
            body_side = ff(body_width / 2)
        y_max = ff(y - lead_width / 2)
        y_min = ff(y + lead_width / 2)
        lead_uuid_ctct = uuid_leads1[p - 1]  # Contact area
        lead_uuid_proj = uuid_leads2[p - 1]  # Vertical projection
        # Contact area
        lines.append(' (polygon {} (layer top_documentation)'.format(
            lead_uuid_ctct))
        lines.append(' (width 0.0) (fill true) (grab_area false)')
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            lcxo_min, y_max))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            lcxo_max, y_max))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            lcxo_max, y_min))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            lcxo_min, y_min))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            lcxo_min, y_max))
        lines.append(' )')
        # Vertical projection, between contact area and body
        lines.append(' (polygon {} (layer top_documentation)'.format(
            lead_uuid_proj))
        lines.append(' (width 0.0) (fill true) (grab_area false)')
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            body_side, y_max))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            lcxo_min, y_max))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            lcxo_min, y_min))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            body_side, y_min))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            body_side, y_max))
        lines.append(' )')

    # Silkscreen (fully outside body)
    # Ensure minimum clearance between copper and silkscreen
    y_offset = max(
        silkscreen_offset - (body_length / 2 - max_y_copper), 0)
    y_max = ff(body_length / 2 + line_width / 2 + y_offset)
    y_min = ff(-body_length / 2 - line_width / 2 - y_offset)
    short_x_offset = body_width / 2 - line_width / 2
    long_x_offset = total_width / 2 - line_width / 2 + pad_toe  # Pin1 marking
    # Top line extends further on the pin-1 side (long_x_offset) to mark pin 1.
    lines.append(' (polygon {} (layer top_placement)'.format(
        uuid_silkscreen_top))
    lines.append(' (width {}) (fill false) (grab_area false)'.format(
        line_width))
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
        ff(-long_x_offset), y_max))  # noqa
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
        ff(short_x_offset), y_max))  # noqa
    lines.append(' )')
    lines.append(' (polygon {} (layer top_placement)'.format(
        uuid_silkscreen_bot))
    lines.append(' (width {}) (fill false) (grab_area false)'.format(
        line_width))
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
        ff(-short_x_offset), y_min))  # noqa
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
        ff(short_x_offset), y_min))  # noqa
    lines.append(' )')

    # Documentation outline (fully inside body)
    outline_x_offset = body_width / 2 - line_width / 2
    lines.append(' (polygon {} (layer top_documentation)'.format(uuid_outline))
    lines.append(' (width {}) (fill false) (grab_area true)'.format(
        line_width))
    y_max = ff(body_length / 2 - line_width / 2)
    y_min = ff(-body_length / 2 + line_width / 2)
    oxo = ff(outline_x_offset)  # Used for shorter code lines below :)
    lines.append(' (vertex (position -{} {}) (angle 0.0))'.format(
        oxo, y_max))
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
        oxo, y_max))
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
        oxo, y_min))
    lines.append(' (vertex (position -{} {}) (angle 0.0))'.format(
        oxo, y_min))
    lines.append(' (vertex (position -{} {}) (angle 0.0))'.format(
        oxo, y_max))
    lines.append(' )')
    max_y = max(max_y, body_length / 2)  # Body contour

    # Courtyard
    courtyard_excess = get_by_density(pitch, density_level, 'courtyard')
    lines.extend(
        indent(
            2,
            generate_courtyard(
                uuid=uuid_courtyard,
                max_x=max_x,
                max_y=max_y,
                excess_x=courtyard_excess,
                excess_y=courtyard_excess,
            )))

    # Labels
    y_max = ff(body_length / 2 + 1.27)
    y_min = ff(-body_length / 2 - 1.27)
    text_attrs = '(height {}) (stroke_width 0.2) ' \
                 '(letter_spacing auto) (line_spacing auto)'.format(pkg_text_height)
    lines.append(' (stroke_text {} (layer top_names)'.format(uuid_text_name))
    lines.append(' {}'.format(text_attrs))
    lines.append(' (align center bottom) (position 0.0 {}) (rotation 0.0)'.format(y_max))
    lines.append(' (auto_rotate true) (mirror false) (value "{{NAME}}")')
    lines.append(' )')
    lines.append(' (stroke_text {} (layer top_values)'.format(uuid_text_value))
    lines.append(' {}'.format(text_attrs))
    lines.append(' (align center top) (position 0.0 {}) (rotation 0.0)'.format(y_min))
    lines.append(' (auto_rotate true) (mirror false) (value "{{VALUE}}")')
    lines.append(' )')
    lines.append(' )')
def parse_variable_tables(filename):
    """Parses metadata tables on the host model side that define the available variables.
    Metadata tables can refer to variables inside a module or as part of a derived
    datatype, which itself is defined inside a module (depending on the location of the
    metadata table). Each variable (standard_name) can exist only once, i.e. each entry
    (list of variables) in the metadata dictionary contains only one element
    (variable = instance of class Var defined in mkcap.py)"""
    # Set debug to true if logging level is debug
    debug = logging.getLogger().getEffectiveLevel() == logging.DEBUG

    # Final metadata container for all variables in file, keyed by standard_name
    metadata = collections.OrderedDict()

    # Registry of modules and derived data types in file:
    # registry[module_name][type_name] = [start line index, end line index]
    registry = collections.OrderedDict()

    # Read all lines of the file at once
    with (open(filename, 'r')) as file:
        file_lines = file.readlines()

    # Pre-process the source: drop empty lines and resolve '&' continuations so
    # that each entry of 'lines' holds one complete logical Fortran statement.
    lines = []
    buffer = ''
    for i in xrange(len(file_lines)):
        line = file_lines[i].rstrip('\n').strip()
        # Skip empty lines
        if line == '' or line == '&':
            continue
        # Remove line continuations: concatenate with following lines
        # NOTE(review): this blanks *every* '&' on the line, which would also
        # alter '&' inside character literals - confirm acceptable for inputs
        if line.endswith('&'):
            buffer += file_lines[i].rstrip('\n').replace('&', ' ')
            continue
        # Write out line with buffer and reset buffer
        lines.append(buffer + file_lines[i].rstrip('\n').replace('&', ' '))
        buffer = ''
    del file_lines

    # Find all modules within the file, and save the start and end lines
    module_lines = {}
    line_counter = 0
    for line in lines:
        words = line.split()
        # 'module <name>' opens a module ('module procedure' is not a module)
        if len(words) > 1 and words[0].lower() == 'module' and not words[1].lower() == 'procedure':
            module_name = words[1].strip()
            if module_name in registry.keys():
                raise Exception('Duplicate module name {0}'.format(module_name))
            registry[module_name] = {}
            module_lines[module_name] = { 'startline' : line_counter }
        elif len(words) > 1 and words[0].lower() == 'end' and words[1].lower() == 'module':
            # Closing statement; the module name after 'end module' is optional
            try:
                test_module_name = words[2]
            except IndexError:
                logging.warning('Encountered closing statement "end module" without module name; assume module_name is {0}'.format(module_name))
                test_module_name = module_name
            if not module_name == test_module_name:
                raise Exception('Module names in opening/closing statement do not match: {0} vs {1}'.format(module_name, test_module_name))
            module_lines[module_name]['endline'] = line_counter
        line_counter += 1

    # Parse each module in the file separately
    for module_name in registry.keys():
        startline = module_lines[module_name]['startline']
        endline = module_lines[module_name]['endline']
        # First pass over the module body: locate derived type definitions
        line_counter = 0
        in_type = False
        for line in lines[startline:endline]:
            current_line_number = startline + line_counter
            words = line.split()
            for j in range(len(words)):
                # Check for the word 'type', that it is the first word in the line,
                # and that a name exists afterwards. It is assumed that definitions
                # (not usage) of derived types cannot be nested - reasonable for Fortran.
                if words[j].lower() == 'type' and j == 0 and len(words) > 1 and not '(' in words[j+1]:
                    if in_type:
                        raise Exception('Nested definitions of derived types not supported')
                    in_type = True
                    type_name = words[j+1].split('(')[0].strip()
                    if type_name in registry[module_name].keys():
                        raise Exception('Duplicate derived type name {0} in module {1}'.format(
                                                                       type_name, module_name))
                    registry[module_name][type_name] = [current_line_number]
                elif words[j].lower() == 'type' and j == 1 and words[j-1].lower() == 'end':
                    # 'end type [name]' closes the current derived type definition
                    if not in_type:
                        raise Exception('Encountered "end_type" without corresponding "type" statement')
                    try:
                        test_type_name = words[j+1]
                    except IndexError:
                        logging.warning('Encountered closing statement "end type" without type name; assume type_name is {0}'.format(type_name))
                        test_type_name = type_name
                    if not type_name == test_type_name:
                        raise Exception('Type names in opening/closing statement do not match: {0} vs {1}'.format(type_name, test_type_name))
                    in_type = False
                    registry[module_name][type_name].append(current_line_number)
            line_counter += 1

        logging.debug('Parsing file {0} with registry {1}'.format(filename, registry))

        # Variables can either be defined at module-level or in derived types - alongside with their tables
        line_counter = 0
        in_table = False
        in_type = False
        for line in lines[startline:endline]:
            current_line_number = startline + line_counter

            # Check for beginning of new table
            words = line.split()
            # This is case sensitive
            if len(words) > 2 and words[0] in ['!!', '!>'] and '\section' in words[1] and 'arg_table_' in words[2]:
                if in_table:
                    raise Exception('Encountered table start for table {0} while still in table {1}'.format(words[2].replace('arg_table_',''), table_name))
                table_name = words[2].replace('arg_table_','')
                # A table must belong to either the module itself or one of its derived types
                if not (table_name == module_name or table_name in registry[module_name].keys()):
                    raise Exception('Encountered table with name {0} without corresponding module or type name'.format(table_name))
                in_table = True
                header_line_number = current_line_number + 1
                line_counter += 1
                continue
            elif (words[0].startswith('!!') or words[0].startswith('!>')) and '\section' in words[0]:
                # '\section' glued to the comment marker means the table markup is broken
                raise Exception("Malformatted table found in {0} / {1} / {2}".format(filename, module_name, table_name))
            # If an argument table is found, parse it
            if in_table:
                words = line.split('|')
                # Separate the table headers
                if current_line_number == header_line_number:
                    # Check for blank table
                    if len(words) <= 1:
                        logging.debug('Skipping blank table {0}'.format(table_name))
                        in_table = False
                        line_counter += 1
                        continue
                    table_header = [x.strip() for x in words[1:-1]]
                    # Check that only valid table headers are used
                    for item in table_header:
                        if not item in VALID_ITEMS['header']:
                            raise Exception('Invalid column header {0} in argument table {1}'.format(item, table_name))
                    # Locate mandatory column 'standard_name'
                    try:
                        standard_name_index = table_header.index('standard_name')
                    except ValueError:
                        raise Exception('Mandatory column standard_name not found in argument table {0}'.format(table_name))
                    line_counter += 1
                    continue
                elif current_line_number == header_line_number + 1:
                    # Skip over separator line
                    line_counter += 1
                    continue
                else:
                    if len(words) == 1:
                        # End of table
                        if words[0].strip() == '!!':
                            in_table = False
                            line_counter += 1
                            continue
                        else:
                            raise Exception('Encountered invalid line "{0}" in argument table {1}'.format(line, table_name))
                    else:
                        # Regular table row: one cell per header column
                        var_items = [x.strip() for x in words[1:-1]]
                        if not len(var_items) == len(table_header):
                            raise Exception('Error parsing variable entry "{0}" in argument table {1}'.format(var_items, table_name))
                        var_name = var_items[standard_name_index]
                        # Skip variables without a standard_name (i.e. empty cell in column standard_name)
                        if var_name:
                            var = Var.from_table(table_header,var_items)
                            # Record where the variable lives: module-level or inside a derived type
                            if table_name == module_name:
                                container = encode_container(module_name)
                            else:
                                container = encode_container(module_name, table_name)
                            var.container = container
                            # Check for incompatible definitions with CCPP mandatory variables
                            if var_name in CCPP_MANDATORY_VARIABLES.keys() and not CCPP_MANDATORY_VARIABLES[var_name].compatible(var):
                                raise Exception('Entry for variable {0}'.format(var_name) + \
                                                ' in argument table {0}'.format(table_name) +\
                                                ' is incompatible with mandatory variable:\n' +\
                                                ' existing: {0}\n'.format(CCPP_MANDATORY_VARIABLES[var_name].print_debug()) +\
                                                ' vs. new: {0}'.format(var.print_debug()))
                            # Add variable to metadata dictionary
                            if not var_name in metadata.keys():
                                metadata[var_name] = [var]
                            else:
                                # Re-definitions are allowed only if compatible with all existing ones
                                for existing_var in metadata[var_name]:
                                    if not existing_var.compatible(var):
                                        raise Exception('New entry for variable {0}'.format(var_name) + \
                                                        ' in argument table {0}'.format(table_name) +\
                                                        ' is incompatible with existing entry:\n' +\
                                                        ' existing: {0}\n'.format(existing_var.print_debug()) +\
                                                        ' vs. new: {0}'.format(var.print_debug()))
                                metadata[var_name].append(var)
                        #else:
                        #    logging.debug('Skipping variable entry "{0}" without a standard_name'.format(var_items))
            line_counter += 1

    # Informative output to screen
    # NOTE(review): this block re-loops over all modules and rebinds module_name;
    # the XML block below then uses the leftover module_name - verify placement
    # and intent against the upstream source
    if debug and len(metadata.keys()) > 0:
        for module_name in registry.keys():
            logging.debug('Module name: {0}'.format(module_name))
            container = encode_container(module_name)
            vars_in_module = []
            for var_name in metadata.keys():
                for var in metadata[var_name]:
                    if var.container == container:
                        vars_in_module.append(var_name)
            logging.debug('Module variables: {0}'.format(', '.join(vars_in_module)))
            for type_name in registry[module_name].keys():
                container = encode_container(module_name, type_name)
                vars_in_type = []
                for var_name in metadata.keys():
                    for var in metadata[var_name]:
                        if var.container == container:
                            vars_in_type.append(var_name)
                logging.debug('Variables in derived type {0}: {1}'.format(type_name, ', '.join(vars_in_type)))

    if debug and len(metadata.keys()) > 0:
        # Write out the XML for debugging purposes
        top = ET.Element('definition')
        top.set('module', module_name)
        container = encode_container(module_name)
        # Module-level variables first ...
        for var_name in metadata.keys():
            for var in metadata[var_name]:
                if var.container == container:
                    sub_var = var.to_xml(ET.SubElement(top, 'variable'))
        # ... then one <type> element per derived type with its variables
        for type_name in registry[module_name].keys():
            container = encode_container(module_name, type_name)
            sub_type = ET.SubElement(top, 'type')
            sub_type.set('type_name', type_name)
            for var_name in metadata.keys():
                for var in metadata[var_name]:
                    if var.container == container:
                        sub_var = var.to_xml(ET.SubElement(sub_type, 'variable'))
        indent(top)
        tree = ET.ElementTree(top)
        xmlfile = module_name + '.xml'
        tree.write(xmlfile, xml_declaration=True, encoding='utf-8', method="xml")
        logging.info('Parsed variable definition tables in module {0}; output => {1}'.format(module_name, xmlfile))
    elif len(metadata.keys()) > 0:
        logging.info('Parsed variable definition tables in module {0}'.format(module_name))

    return metadata
def entry_ccb_attrs(self): data = "" if self.glidein.use_ccb() == "y": # Put USE_CCB in the entries so that it is easy to disable it selectively data = data + """%s<attr name="USE_CCB" value="True" const="True" type="string" glidein_publish="True" publish="True" job_publish="False" parameter="True"/>""" % (common.indent(1)) return data
def parse_scheme_tables(filename):
    """Parses metadata tables for a physics scheme that requests/requires variables as
    input arguments. Metadata tables can only describe variables required by a subroutine
    'subroutine_name' of scheme 'scheme_name' inside a module 'module_name'. Each variable
    (standard_name) can exist only once, i.e. each entry (list of variables) in the metadata
    dictionary  contains only one element (variable = instance of class Var defined in
    mkcap.py). The metadata dictionaries of the individual schemes are merged afterwards
    (called from ccpp_prebuild.py) using merge_metadata_dicts, where multiple instances
    of variables are compared for compatibility and collected in a list (entry in the
    merged metadata dictionary). The merged metadata dictionary of all schemes (which
    contains only compatible variable instances in the list referred to by standard_name)
    is then compared to the unique definition in the metadata dictionary of the variables
    provided by the host model using compare_metadata in ccpp_prebuild.py."""
    # Set debug to true if logging level is debug
    debug = logging.getLogger().getEffectiveLevel() == logging.DEBUG

    # Valid suffices for physics scheme routines
    subroutine_suffices = [ 'init', 'run', 'finalize']

    # Final metadata container for all variables in file, keyed by standard_name
    metadata = collections.OrderedDict()

    # Registry of modules and derived data types in file:
    # registry[module_name][scheme_name][subroutine_name] = [startline, endline]
    #registry = {}
    registry = collections.OrderedDict()

    # Argument lists of each subroutine in the file, in calling order
    arguments = collections.OrderedDict()

    # Read all lines of the file at once
    with (open(filename, 'r')) as file:
        file_lines = file.readlines()

    # Pre-process the source: drop empty lines and resolve '&' continuations;
    # original_line_numbers maps each logical line back to the 1-based line in
    # the file, for error messages.
    lines = []
    original_line_numbers = []
    buffer = ''
    for i in xrange(len(file_lines)):
        line = file_lines[i].rstrip('\n').strip()
        # Skip empty lines
        if line == '' or line == '&':
            continue
        # Remove line continuations: concatenate with following lines
        if line.endswith('&'):
            buffer += file_lines[i].rstrip('\n').replace('&', ' ')
            continue
        # Write out line with buffer and reset buffer
        lines.append(buffer + file_lines[i].rstrip('\n').replace('&', ' '))
        original_line_numbers.append(i+1)
        buffer = ''
    del file_lines

    # Find all modules within the file, and save the start and end lines
    module_lines = {}
    line_counter = 0
    for line in lines:
        words = line.split()
        if len(words) > 1 and words[0].lower() == 'module' and not words[1].lower() == 'procedure':
            module_name = words[1].strip()
            if module_name in registry.keys():
                raise Exception('Duplicate module name {0}'.format(module_name))
            registry[module_name] = {}
            module_lines[module_name] = { 'startline' : line_counter }
        elif len(words) > 1 and words[0].lower() == 'end' and words[1].lower() == 'module':
            # The module name after 'end module' is optional
            try:
                test_module_name = words[2]
            except IndexError:
                logging.warning('Warning, encountered closing statement "end module" without module name; assume module_name is {0}'.format(module_name))
                test_module_name = module_name
            if not module_name == test_module_name:
                raise Exception('Module names in opening/closing statement do not match: {0} vs {1}'.format(module_name, test_module_name))
            module_lines[module_name]['endline'] = line_counter
        line_counter += 1

    # Parse each module in the file separately
    for module_name in registry.keys():
        startline = module_lines[module_name]['startline']
        endline = module_lines[module_name]['endline']
        # First pass over the module body: locate the scheme subroutines
        line_counter = 0
        in_subroutine = False
        for line in lines[startline:endline]:
            current_line_number = startline + line_counter
            words = line.split()
            for j in range(len(words)):
                # Check for the word 'subroutine', that it is the first word in the line,
                # and that a name exists afterwards. Nested subroutines are ignored.
                if words[j].lower() == 'subroutine' and j == 0 and len(words) > 1:
                    if in_subroutine:
                        logging.debug('Warning, ignoring nested subroutine in module {0} and subroutine {1}'.format(module_name, subroutine_name))
                        continue
                    subroutine_name = words[j+1].split('(')[0].strip()
                    # Consider the last substring separated by a '_' of the subroutine name as a 'postfix'
                    if subroutine_name.find('_') >= 0:
                        subroutine_suffix = subroutine_name.split('_')[-1]
                        # Only subroutines named <scheme>_{init,run,finalize} are registered
                        if subroutine_suffix in subroutine_suffices:
                            scheme_name = subroutine_name[0:subroutine_name.rfind('_')]
                            if not scheme_name == module_name:
                                raise Exception('Scheme name differs from module name: module_name="{0}" vs. scheme_name="{1}"'.format(
                                                                                                       module_name, scheme_name))
                            if not scheme_name in registry[module_name].keys():
                                registry[module_name][scheme_name] = {}
                            if subroutine_name in registry[module_name][scheme_name].keys():
                                raise Exception('Duplicate subroutine name {0} in module {1}'.format(
                                                                       subroutine_name, module_name))
                            registry[module_name][scheme_name][subroutine_name] = [current_line_number]
                            in_subroutine = True
                elif words[j].lower() == 'subroutine' and j == 1 and words[j-1].lower() == 'end':
                    # 'end subroutine [name]'; the name is optional
                    try:
                        test_subroutine_name = words[j+1]
                    except IndexError:
                        logging.warning('Warning, encountered closing statement "end subroutine" without subroutine name; ' +\
                                        ' assume subroutine_name is {0}'.format(subroutine_name))
                        test_subroutine_name = subroutine_name
                    if in_subroutine and subroutine_name == test_subroutine_name:
                        in_subroutine = False
                        registry[module_name][scheme_name][subroutine_name].append(current_line_number)
                # Avoid problems by enforcing end statements to carry a descriptor (subroutine, module, ...)
                elif in_subroutine and len(words) == 1 and words[0].lower() == 'end':
                    raise Exception('Encountered closing statement "end" without descriptor (subroutine, module, ...): ' +\
                                    'line {0}="{1}" in file {2}'.format(original_line_numbers[current_line_number], line, filename))
            line_counter += 1

        # Check that for each registered subroutine the start and end lines were found
        for scheme_name in registry[module_name].keys():
            for subroutine_name in registry[module_name][scheme_name].keys():
                if not len(registry[module_name][scheme_name][subroutine_name]) == 2:
                    raise Exception('Error parsing start and end lines for subroutine {0} in module {1}'.format(subroutine_name, module_name))
        logging.debug('Parsing file {0} with registry {1}'.format(filename, registry))

        for scheme_name in registry[module_name].keys():
            for subroutine_name in registry[module_name][scheme_name].keys():
                # Record the order of variables in the call list to each subroutine in a list
                if not module_name in arguments.keys():
                    arguments[module_name] = {}
                if not scheme_name in arguments[module_name].keys():
                    arguments[module_name][scheme_name] = {}
                if not subroutine_name in arguments[module_name][scheme_name].keys():
                    arguments[module_name][scheme_name][subroutine_name] = []
                # Find the argument table corresponding to each subroutine by searching
                # "upward" from the subroutine definition line for the "arg_table_SubroutineName" section
                table_found = False
                header_line_number = None
                for line_number in range(registry[module_name][scheme_name][subroutine_name][0], -1, -1):
                    line = lines[line_number]
                    words = line.split()
                    for word in words:
                        if (len(words) > 2 and words[0] in ['!!', '!>'] and '\section' in words[1] and 'arg_table_{0}'.format(subroutine_name) in words[2]):
                            table_found = True
                            header_line_number = line_number + 1
                            table_name = subroutine_name
                            break
                        else:
                            # The table marker appears somewhere on the line but not in the
                            # expected '!! \section arg_table_...' layout -> broken markup
                            for word in words:
                                if 'arg_table_{0}'.format(subroutine_name) in word:
                                    raise Exception("Malformatted table found in {0} / {1} / {2} / {3}".format(filename, module_name, scheme_name, subroutine_name))
                    if table_found:
                        break
                # If an argument table is found, parse it
                if table_found:
                    # Separate the table headers
                    table_header = lines[header_line_number].split('|')
                    # Check for blank table
                    if len(table_header) <= 1:
                        logging.debug('Skipping blank table {0}'.format(table_name))
                        table_found = False
                        continue
                    # Extract table header
                    table_header = [x.strip() for x in table_header[1:-1]]
                    # Check that only valid table headers are used
                    for item in table_header:
                        if not item in VALID_ITEMS['header']:
                            raise Exception('Invalid column header {0} in argument table {1}'.format(item, table_name))
                    # Locate mandatory column 'standard_name'
                    try:
                        standard_name_index = table_header.index('standard_name')
                    except ValueError:
                        raise Exception('Mandatory column standard_name not found in argument table {0}'.format(table_name))
                    # Get all of the variable information in table
                    # (header_line_number + 2 skips header and separator rows)
                    end_of_table = False
                    line_number = header_line_number + 2
                    while not end_of_table:
                        line = lines[line_number]
                        words = line.split('|')
                        if len(words) == 1:
                            if words[0] == '!!':
                                end_of_table = True
                            else:
                                raise Exception('Encountered invalid line "{0}" in argument table {1}'.format(line, table_name))
                        else:
                            var_items = [x.strip() for x in words[1:-1]]
                            if not len(var_items) == len(table_header):
                                raise Exception('Error parsing variable entry "{0}" in argument table {1}'.format(var_items, table_name))
                            var_name = var_items[standard_name_index]
                            # Column standard_name cannot be left blank in scheme_tables
                            if not var_name:
                                raise Exception('Encountered line "{0}" without standard name in argument table {1}'.format(line, table_name))
                            # Add standard_name to argument list for this subroutine
                            arguments[module_name][scheme_name][subroutine_name].append(var_name)
                            var = Var.from_table(table_header,var_items)
                            # Check for incompatible definitions with CCPP mandatory variables
                            if var_name in CCPP_MANDATORY_VARIABLES.keys() and not CCPP_MANDATORY_VARIABLES[var_name].compatible(var):
                                raise Exception('Entry for variable {0}'.format(var_name) + \
                                                ' in argument table of subroutine {0}'.format(subroutine_name) +\
                                                ' is incompatible with mandatory variable:\n' +\
                                                ' existing: {0}\n'.format(CCPP_MANDATORY_VARIABLES[var_name].print_debug()) +\
                                                ' vs. new: {0}'.format(var.print_debug()))
                            # Record the location of this variable: module, scheme, table
                            container = encode_container(module_name, scheme_name, table_name)
                            var.container = container
                            # Add variable to metadata dictionary
                            if not var_name in metadata.keys():
                                metadata[var_name] = [var]
                            else:
                                # Re-definitions are allowed only if compatible with all existing ones
                                for existing_var in metadata[var_name]:
                                    if not existing_var.compatible(var):
                                        raise Exception('New entry for variable {0}'.format(var_name) + \
                                                        ' in argument table of subroutine {0}'.format(subroutine_name) +\
                                                        ' is incompatible with existing entry:\n' +\
                                                        ' existing: {0}\n'.format(existing_var.print_debug()) +\
                                                        ' vs. new: {0}'.format(var.print_debug()))
                                metadata[var_name].append(var)
                        line_number += 1

                    # After parsing entire metadata table for the subroutine, check that all mandatory CCPP variables are present
                    for var_name in CCPP_MANDATORY_VARIABLES.keys():
                        if not var_name in arguments[module_name][scheme_name][subroutine_name]:
                            raise Exception('Mandatory CCPP variable {0} not declared in metadata table of subroutine {1}'.format(
                                                                                                   var_name, subroutine_name))

        # For CCPP-compliant files (i.e. files with metadata tables, perform additional checks)
        if len(metadata.keys()) > 0:
            # Check that all subroutine "root" names in the current module are equal to scheme_name
            # and that there are exactly three subroutines for scheme X: X_init, X_run, X_finalize
            message = ''
            abort = False
            for scheme_name in registry[module_name].keys():
                # Pre-generate error message
                message += 'Check that all subroutines in module {0} have the same root name:\n'.format(module_name)
                message += '    i.e. scheme_A_init, scheme_A_run, scheme_A_finalize\n'
                message += 'Here is a list of the subroutine names for scheme {0}:\n'.format(scheme_name)
                message += '{0}\n\n'.format(', '.join(sorted(registry[module_name][scheme_name].keys())))
                if (not len(registry[module_name][scheme_name].keys()) == 3):
                    logging.exception(message)
                    abort = True
                else:
                    for suffix in subroutine_suffices:
                        subroutine_name = '{0}_{1}'.format(scheme_name, suffix)
                        if not subroutine_name in registry[module_name][scheme_name].keys():
                            logging.exception(message)
                            abort = True
            if abort:
                raise Exception(message)

        # Debugging output to screen and to XML
        if debug and len(metadata.keys()) > 0:
            # To screen
            logging.debug('Module name: {0}'.format(module_name))
            for scheme_name in registry[module_name].keys():
                logging.debug('Scheme name: {0}'.format(scheme_name))
                for subroutine_name in registry[module_name][scheme_name].keys():
                    container = encode_container(module_name, scheme_name, subroutine_name)
                    vars_in_subroutine = []
                    for var_name in metadata.keys():
                        for var in metadata[var_name]:
                            if var.container == container:
                                vars_in_subroutine.append(var_name)
                    logging.debug('Variables in subroutine {0}: {1}'.format(subroutine_name, ', '.join(vars_in_subroutine)))
            # To XML - one file per scheme
            for scheme_name in registry[module_name].keys():
                top = ET.Element('scheme')
                top.set('module', scheme_name)
                for subroutine_name in registry[module_name][scheme_name].keys():
                    sub_sub = ET.SubElement(top, 'subroutine')
                    sub_sub.set('name', subroutine_name)
                    container = encode_container(module_name, scheme_name, subroutine_name)
                    # Variable output in order of calling arguments
                    for var_name in arguments[module_name][scheme_name][subroutine_name]:
                        for var in metadata[var_name]:
                            if var.container == container:
                                sub_var = var.to_xml(ET.SubElement(sub_sub, 'variable'))
                indent(top)
                tree = ET.ElementTree(top)
                xmlfile = scheme_name + '.xml'
                tree.write(xmlfile, xml_declaration=True, encoding='utf-8', method="xml")
                logging.info('Parsed tables in scheme {0}; output => {1}'.format(scheme_name, xmlfile))
        # Standard output to screen
        elif len(metadata.keys()) > 0:
            for scheme_name in registry[module_name].keys():
                logging.info('Parsed tables in scheme {0}'.format(scheme_name))

    # End of loop over all module_names

    return (metadata, arguments)
    def write(self, fields, queries, java_package=None, java_class="DataStructure", java="-", **kwargs):
        """Emit a Java implementation of the synthesized data structure.

        fields       -- ordered mapping of record field name -> Java type string
        queries      -- query objects; each q.impl supplies the Java code
                        fragments for its own state, maintenance and iteration
        java_package -- optional package declaration for the generated file
        java_class   -- name of the generated public class
        java         -- output target; "-" writes to stdout (via open_maybe_stdout)
        kwargs       -- ignored; accepted so all backends share one signature
        """
        with open_maybe_stdout(java) as f:
            writer = f.write
            if java_package:
                writer("package {};\n\n".format(java_package))
            writer("public class {} implements java.io.Serializable {{\n".format(java_class))

            # record type: one private member per field each query implementation needs
            private_members = []
            RECORD_NAME = self.record_type()
            for q in queries:
                private_members += list((f, ty.gen_type(self)) for f, ty in q.impl.private_members())
            _gen_record_type(RECORD_NAME, list(fields.items()), private_members, writer)

            # auxiliary type definitions ('seen' de-duplicates types shared by queries)
            seen = set()
            for q in queries:
                for t in q.impl.auxtypes():
                    _gen_aux_type(t, self, writer, seen)

            # constructor: initialize every query's state
            writer(" public {}() {{\n".format(java_class))
            for q in queries:
                writer(indent(" ", q.impl.construct(self, This())))
            writer(" }\n")

            # get current size
            writer(" int my_size = 0;\n")
            writer(" int size() { return my_size; }\n")

            # add routine: insert record into every query's index
            writer(" public void add({} x) {{\n".format(RECORD_NAME))
            writer(" ++my_size;\n")
            for q in queries:
                writer(indent(" ", q.impl.gen_insert(self, "x", This())))
            writer(" }\n")

            # remove routine: delete record from every query's index
            writer(" public void remove({} x) {{\n".format(RECORD_NAME))
            writer(" --my_size;\n")
            for q in queries:
                writer(indent(" ", q.impl.gen_remove(self, "x", This())))
            writer(" }\n")

            # update routines: one per-field updater guarded by a change check ...
            for f, ty in fields.items():
                writer(" void update{}({} x, {} val) {{\n".format(capitalize(f), self.record_type(), ty))
                writer(" if ({} != val) {{\n".format(self.get_field("x", f)))
                for q in queries:
                    writer(indent(" ", q.impl.gen_update(self, fields, "x", {f: "val"}, This())))
                writer(" {} = val;\n".format(self.get_field("x", f)))
                writer(" }\n")
                writer(" }\n")
            # ... plus one bulk updater taking all fields at once (no change check)
            writer(" void update({} x, {}) {{\n".format(self.record_type(), ", ".join("{} {}".format(ty, f) for f, ty in fields.items())))
            for q in queries:
                writer(indent(" ", q.impl.gen_update(self, fields, "x", {f: f for f in fields}, This())))
            for f, ty in fields.items():
                writer(" {} = {};\n".format(self.get_field("x", f), f))
            writer(" }\n")

            # query routines: per query, its state fields, a nested iterator class,
            # and the public entry points
            for q in queries:
                for f, ty in q.impl.fields():
                    writer(" /*private*/ {} {};\n".format(ty.gen_type(self), f))

                it_name = "{}_iterator".format(q.name)
                writer(" /*private*/ static final class {} implements java.util.Iterator<{}> {{\n".format(it_name, RECORD_NAME))
                state = q.impl.state()
                writer(" {} parent;\n".format(java_class))
                # Only query variables the implementation actually reads are captured
                vars_needed = [(v, ty) for v, ty in q.vars if q.impl.needs_var(v)]
                for v, ty in vars_needed:
                    writer(" final {} {};\n".format(ty, v))
                for f, ty in state:
                    writer(" {} {};\n".format(ty.gen_type(self), f))
                # iterator constructor: parent handle + captured vars + initial state
                writer(" {}({} parent{}{}) {{\n".format(it_name, java_class, "".join(", {} {}".format(ty, v) for v, ty in vars_needed), "".join(", {} {}".format(ty.gen_type(self), f) for f, ty in state)))
                writer(" this.parent = parent;\n")
                for v, ty in vars_needed:
                    writer(" this.{v} = {v};\n".format(v=v))
                for f, ty in state:
                    writer(" this.{f} = {f};\n".format(f=f))
                writer(" }\n")
                writer(" @Override public boolean hasNext() {\n")
                proc, ret = q.impl.gen_has_next(self, parent_structure=TupleInstance("parent"), iterator=This())
                writer(indent(" ", proc))
                writer(" return {};\n".format(ret))
                writer(" }\n")
                writer(" @Override public {} next() {{\n".format(RECORD_NAME))
                proc, ret = q.impl.gen_next(self, parent_structure=TupleInstance("parent"), iterator=This())
                writer(indent(" ", proc))
                writer(" return {};\n".format(ret))
                writer(" }\n")
                writer(" @Override public void remove() {\n")
                writer(" --parent.my_size;\n")
                # Remove in-place from this query's index, then from every other
                # query's index so all indexes stay consistent
                proc, removed = q.impl.gen_remove_in_place(self, parent_structure=TupleInstance("parent"), iterator=This())
                writer(indent(" ", proc))
                for q2 in queries:
                    if q2 != q:
                        writer(indent(" ", q2.impl.gen_remove(self, removed, parent_structure=TupleInstance("parent"))))
                writer(" }\n")
                writer(" }\n")

                # public iterator-returning entry point
                writer(" public java.util.Iterator<{}> {}({}) {{\n".format(RECORD_NAME, q.name, ", ".join("{} {}".format(ty, v) for v, ty in q.vars)))
                proc, stateExps = q.impl.gen_query(self, q.vars, This())
                writer(indent(" ", proc))
                writer(" return new {}(this{}{});\n".format(it_name, "".join(", {}".format(v) for v, ty in vars_needed), "".join(", {}".format(e) for e in stateExps)))
                writer(" }\n")
                # public single-result entry point ('<name>_1')
                writer(" public {} {}_1({}) {{\n".format(RECORD_NAME, q.name, ", ".join("{} {}".format(ty, v) for v, ty in q.vars)))
                proc, result = q.impl.gen_query_one(self, q.vars, This())
                writer(indent(" ", proc))
                writer(" return {};\n".format(result))
                writer(" }\n")

            writer("}\n")
def __repr__(self): return "A " + type(self).__name__ + " containing :\n" + '\n'.join( [indent(str(e)) for e in self.boxes])
def add_footprint_variant(
        key: str,
        name: str,
        density_level: str,
        *,
        gap: Optional[float] = None,
        footprint: Optional[FootprintDimensions] = None) -> None:
    """
    Generate a footprint variant and append its S-expression lines to `lines`.

    Exactly one of the two mutually exclusive keyword arguments must be set:

    - gap: distance between the two pads; the pad size is then derived from
      `config.body` plus the IPC density tables (via `get_by_density`).
    - footprint: explicit pad dimensions (`FootprintDimensions`).

    Raises:
        ValueError: if both or neither of `gap` / `footprint` are given.

    NOTE(review): this is a closure — it reads `config`, `lines`, `uuid_pads`,
    `polarization`, `_uuid`, `ff`, `get_by_density`, `generate_courtyard`,
    the `line_width*` / `label_offset*` / `silkscreen_clearance` /
    `pkg_text_height` constants etc. from the enclosing scope.
    """
    if gap is not None and footprint is not None:
        raise ValueError('Only toe extension or footprint may be set')
    if gap is None and footprint is None:
        raise ValueError(
            'Either toe extension or footprint must be set')

    # Stable per-variant UUIDs, derived deterministically from `key`.
    uuid_footprint = _uuid('footprint-{}'.format(key))
    uuid_text_name = _uuid('text-name-{}'.format(key))
    uuid_text_value = _uuid('text-value-{}'.format(key))
    uuid_silkscreen_top = _uuid('line-silkscreen-top-{}'.format(key))
    uuid_silkscreen_bot = _uuid('line-silkscreen-bot-{}'.format(key))
    uuid_courtyard = _uuid('polygon-courtyard-{}'.format(key))
    uuid_outline_top = _uuid('polygon-outline-top-{}'.format(key))
    uuid_outline_bot = _uuid('polygon-outline-bot-{}'.format(key))
    uuid_outline_left = _uuid('polygon-outline-left-{}'.format(key))
    uuid_outline_right = _uuid('polygon-outline-right-{}'.format(key))
    uuid_outline_around = _uuid(
        'polygon-outline-around-{}'.format(key))
    uuid_polarization_mark = _uuid(
        'polygon-polarization-mark-{}'.format(key))

    # Max boundary (grown while emitting pads; used later for the courtyard)
    max_x = 0.0
    max_y = 0.0

    # Line width adjusted for size of element
    if config.body.length >= 2.0:
        silk_lw = line_width
        doc_lw = line_width
    elif config.body.length >= 1.0:
        silk_lw = line_width_thin
        doc_lw = line_width_thin
    else:
        silk_lw = line_width_thin
        doc_lw = line_width_thinner

    lines.append(' (footprint {}'.format(uuid_footprint))
    lines.append(' (name "{}")'.format(name))
    lines.append(' (description "")')

    # Pads
    if footprint is not None:
        # Caller supplied explicit pad dimensions.
        pad_width = footprint.pad_width
        pad_length = footprint.pad_length
        pad_gap = footprint.pad_gap
        pad_dx = (pad_gap / 2 + pad_length / 2)  # x offset (delta-x)
    elif gap is not None:
        # Derive pad size from the body plus IPC density protrusions.
        pad_gap = gap
        pad_width = config.body.width + get_by_density(
            config.body.length, density_level, 'side')
        pad_toe = get_by_density(config.body.length, density_level, 'toe')
        pad_length = (config.body.length - gap) / 2 + pad_toe
        pad_dx = (gap / 2 + pad_length / 2)  # x offset (delta-x)
    else:
        raise ValueError('Either footprint or gap must be set')
    for p in [0, 1]:
        pad_uuid = uuid_pads[p - 1]
        sign = -1 if p == 1 else 1  # pad 1 sits on the negative x side
        lines.append(
            ' (pad {} (side top) (shape rect)'.format(pad_uuid))
        lines.append(
            ' (position {} 0.0) (rotation 0.0) (size {} {}) (drill 0.0)'
            .format(
                ff(sign * pad_dx),
                ff(pad_length),
                ff(pad_width),
            ))
        max_x = max(max_x, pad_length / 2 + sign * pad_dx)
        lines.append(' )')
    max_y = max(max_y, config.body.width / 2)
    max_y = max(max_y, pad_width / 2)

    # Documentation
    half_gap_raw = (config.body.gap or pad_gap) / 2
    half_gap = ff(half_gap_raw)
    if footprint is None:
        # We assume that leads are across the entire width of the part (e.g. MLCC)
        dx = ff(config.body.length / 2)
        dy = ff(config.body.width / 2)
        lines.append(' (polygon {} (layer {})'.format(
            uuid_outline_left, 'top_documentation'))
        lines.append(' (width 0.0) (fill true) (grab_area false)')
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(
                dx, dy))  # NW
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(
                half_gap, dy))  # NE
        lines.append(
            ' (vertex (position -{} -{}) (angle 0.0))'.format(
                half_gap, dy))  # SE
        lines.append(
            ' (vertex (position -{} -{}) (angle 0.0))'.format(
                dx, dy))  # SW
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(
                dx, dy))  # NW
        lines.append(' )')
        lines.append(' (polygon {} (layer {})'.format(
            uuid_outline_right, 'top_documentation'))
        lines.append(' (width 0.0) (fill true) (grab_area false)')
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            dx, dy))  # NE
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            half_gap, dy))  # NW
        lines.append(
            ' (vertex (position {} -{}) (angle 0.0))'.format(
                half_gap, dy))  # SW
        lines.append(
            ' (vertex (position {} -{}) (angle 0.0))'.format(
                dx, dy))  # SE
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            dx, dy))  # NE
        lines.append(' )')
        # Top/bottom edges, drawn as open outlines inset by half a line width.
        dy = ff(config.body.width / 2 - doc_lw / 2)
        lines.append(' (polygon {} (layer {})'.format(
            uuid_outline_top, 'top_documentation'))
        lines.append(
            ' (width {}) (fill false) (grab_area false)'.format(
                doc_lw))
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(
                half_gap, dy))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            half_gap, dy))
        lines.append(' )')
        lines.append(' (polygon {} (layer {})'.format(
            uuid_outline_bot, 'top_documentation'))
        lines.append(
            ' (width {}) (fill false) (grab_area false)'.format(
                doc_lw))
        lines.append(
            ' (vertex (position -{} -{}) (angle 0.0))'.format(
                half_gap, dy))
        lines.append(
            ' (vertex (position {} -{}) (angle 0.0))'.format(
                half_gap, dy))
        lines.append(' )')
    else:
        # We have more precise information about the lead (e.g. molded
        # packages where leads are not the full width of the package).
        dx = ff(config.body.length / 2 - doc_lw / 2)
        dy = ff(config.body.width / 2 - doc_lw / 2)
        lines.append(' (polygon {} (layer {})'.format(
            uuid_outline_around, 'top_documentation'))
        lines.append(
            ' (width {}) (fill false) (grab_area false)'.format(
                doc_lw))
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(dx, dy))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            dx, dy))
        lines.append(
            ' (vertex (position {} -{}) (angle 0.0))'.format(dx, dy))
        lines.append(
            ' (vertex (position -{} -{}) (angle 0.0))'.format(
                dx, dy))
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(dx, dy))
        lines.append(' )')
        # Filled lead rectangles on both sides.
        dx = ff(config.body.length / 2)
        dy = ff((config.body.lead_width or footprint.pad_width) / 2)
        lines.append(' (polygon {} (layer {})'.format(
            uuid_outline_left, 'top_documentation'))
        lines.append(' (width 0.0) (fill true) (grab_area false)')
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(dx, dy))
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(
                half_gap, dy))
        lines.append(
            ' (vertex (position -{} -{}) (angle 0.0))'.format(
                half_gap, dy))
        lines.append(
            ' (vertex (position -{} -{}) (angle 0.0))'.format(
                dx, dy))
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(dx, dy))
        lines.append(' )')
        lines.append(' (polygon {} (layer {})'.format(
            uuid_outline_right, 'top_documentation'))
        lines.append(' (width 0.0) (fill true) (grab_area false)')
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            dx, dy))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            half_gap, dy))
        lines.append(
            ' (vertex (position {} -{}) (angle 0.0))'.format(
                half_gap, dy))
        lines.append(
            ' (vertex (position {} -{}) (angle 0.0))'.format(dx, dy))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(
            dx, dy))
        lines.append(' )')
    if polarization:
        # Vertical bar next to the marked (negative-x) pad.
        polarization_mark_width = config.body.width / 8
        dx_outer = ff(half_gap_raw - polarization_mark_width / 2)
        dx_inner = ff(half_gap_raw - polarization_mark_width * 1.5)
        dy = ff(config.body.width / 2 - doc_lw)
        lines.append(' (polygon {} (layer {})'.format(
            uuid_polarization_mark, 'top_documentation'))
        lines.append(' (width 0.0) (fill true) (grab_area true)')
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(
                dx_outer, dy))
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(
                dx_inner, dy))
        lines.append(
            ' (vertex (position -{} -{}) (angle 0.0))'.format(
                dx_inner, dy))
        lines.append(
            ' (vertex (position -{} -{}) (angle 0.0))'.format(
                dx_outer, dy))
        lines.append(
            ' (vertex (position -{} {}) (angle 0.0))'.format(
                dx_outer, dy))
        lines.append(' )')

    # Silkscreen (only drawn for parts longer than 1.0 — presumably mm; TODO confirm unit)
    if config.body.length > 1.0:
        if polarization:
            dx_unmarked = pad_dx + pad_length / 2
            dx_marked = dx_unmarked + silk_lw / 2 + silkscreen_clearance
            dy = ff(
                max(
                    config.body.width / 2 + silk_lw / 2,  # Based on body width
                    pad_width / 2 + silk_lw / 2 + silkscreen_clearance,  # Based on pad width
                ))
            # Open U-shape around the marked pad only.
            lines.append(' (polygon {} (layer {})'.format(
                uuid_silkscreen_top, 'top_placement'))
            lines.append(
                ' (width {}) (fill false) (grab_area false)'.format(
                    silk_lw))
            lines.append(
                ' (vertex (position {} {}) (angle 0.0))'.format(
                    ff(dx_unmarked), dy))
            lines.append(
                ' (vertex (position -{} {}) (angle 0.0))'.format(
                    ff(dx_marked), dy))
            lines.append(
                ' (vertex (position -{} -{}) (angle 0.0))'.format(
                    ff(dx_marked), dy))
            lines.append(
                ' (vertex (position {} -{}) (angle 0.0))'.format(
                    ff(dx_unmarked), dy))
            lines.append(' )')
        else:
            assert gap is not None, \
                "Support for non-polarized packages with irregular pads not yet fully implemented"
            # Two short horizontal strokes between the pads, top and bottom.
            dx = ff(gap / 2 - silk_lw / 2 - silkscreen_clearance)
            dy = ff(config.body.width / 2 + silk_lw / 2)
            lines.append(' (polygon {} (layer {})'.format(
                uuid_silkscreen_top, 'top_placement'))
            lines.append(
                ' (width {}) (fill false) (grab_area false)'.format(
                    silk_lw))
            lines.append(
                ' (vertex (position -{} {}) (angle 0.0))'.format(
                    dx, dy))
            lines.append(
                ' (vertex (position {} {}) (angle 0.0))'.format(
                    dx, dy))
            lines.append(' )')
            lines.append(' (polygon {} (layer {})'.format(
                uuid_silkscreen_bot, 'top_placement'))
            lines.append(
                ' (width {}) (fill false) (grab_area false)'.format(
                    silk_lw))
            lines.append(
                ' (vertex (position -{} -{}) (angle 0.0))'.format(
                    dx, dy))
            lines.append(
                ' (vertex (position {} -{}) (angle 0.0))'.format(
                    dx, dy))
            lines.append(' )')

    # Courtyard: pad/body extent plus the density-dependent excess.
    courtyard_excess = get_by_density(config.body.length,
                                      density_level, 'courtyard')
    lines.extend(
        indent(
            2,
            generate_courtyard(
                uuid=uuid_courtyard,
                max_x=max_x,
                max_y=max_y,
                excess_x=courtyard_excess,
                excess_y=courtyard_excess,
            )))

    # Labels: NAME above, VALUE below the body.
    if config.body.width < 2.0:
        offset = label_offset_thin
    else:
        offset = label_offset
    dy = ff(config.body.width / 2 + offset)  # y offset (delta-y)
    text_attrs = '(height {}) (stroke_width 0.2) ' \
                 '(letter_spacing auto) (line_spacing auto)'.format(pkg_text_height)
    lines.append(
        ' (stroke_text {} (layer top_names)'.format(uuid_text_name))
    lines.append(' {}'.format(text_attrs))
    lines.append(
        ' (align center bottom) (position 0.0 {}) (rotation 0.0)'.
        format(dy))
    lines.append(
        ' (auto_rotate true) (mirror false) (value "{{NAME}}")')
    lines.append(' )')
    lines.append(
        ' (stroke_text {} (layer top_values)'.format(uuid_text_value))
    lines.append(' {}'.format(text_attrs))
    lines.append(
        ' (align center top) (position 0.0 -{}) (rotation 0.0)'.
        format(dy))
    lines.append(
        ' (auto_rotate true) (mirror false) (value "{{VALUE}}")')
    lines.append(' )')
    lines.append(' )')  # close (footprint ...)
def syncBrac(self, path):
    """
    Re-render every outdated bric inside the .brac archive at `path`.

    A .brac file is a zip archive holding a top-level brac.xml plus one
    bric.<id>/bric.xml per 'bric' layer. For each bric whose newest
    <snapshot> date is older than its configured time interval, this
    renders a fresh screenshot via self.renderer and appends a new
    <snapshot> element to that bric's bric.xml in a temp directory.

    Returns False if `path` is not a regular file; returns None otherwise.

    NOTE(review): Python 2 syntax (`except RuntimeError, e`). This looks
    like a stray, truncated duplicate of BracSynchronizer.syncBrac — it
    never repacks the archive, closes `zf_brac`, or removes `tempdir`.
    """
    logger.debug('syncing brac: %s' % path)
    if not os.path.isfile(path):
        return False
    tempdir = tempfile.mkdtemp()
    if not os.path.isdir(tempdir):
        logger.error('tempdir %s does not exists' % tempdir)
        return
    # Open the archive and pull out the master brac.xml definition.
    zf_brac = zipfile.ZipFile(path, 'a')
    zf_brac.extract('brac.xml', tempdir)
    bracxml = et.parse(os.path.join(tempdir, 'brac.xml'))
    bracdef = bracxml.getroot()
    layers = bracdef.find('layers');
    if layers == None:
        logger.error('brac %s has no layers node!' % path)
        return
    resolution = dict(zip(['width', 'height'], bracdef.attrib['resolution'].split()))
    # NOTE(review): `vars` shadows the builtin of the same name.
    vars = {
        'tools': os.path.join(self.homedir, 'tools'),
        'bracpath-brac': path,
        'bracname-brac': os.path.split(path)[1],
        'bracname-zip': "%s.%s.zip" % (os.path.split(path)[1], time.time()),
        'bracpath-zip': "%s.%s.zip" % (path, time.time()),
        'tempdir': tempdir,
    }
    for layer in layers:
        if layer.tag != 'bric':
            continue
        bric = layer
        #extracting bric files to its temp directory
        revision = str(int(bric.attrib['revision']) + 1)
        vars['bricid'] = bric.attrib['id']
        vars['bricdir'] = os.path.join(vars['tempdir'], r'bric.%s' % vars['bricid'])
        vars['bricpath'] = os.path.join(vars['bricdir'], r'%s.png' % revision)
        vars['bricdefpath'] = os.path.join(vars['bricdir'], 'bric.xml')
        bricid = bric.attrib['id']
        newbricdir = os.path.join(tempdir, r'bric.%s' % bricid)
        zf_brac.extract(r'bric.%s/bric.xml' % bricid, tempdir)
        bricxml = et.parse(os.path.join(newbricdir, 'bric.xml'))
        bricdef = bricxml.getroot()
        # Skip brics whose last snapshot (last child of bricdef) is recent enough.
        if not self.needUpdate(bric.attrib['timeinterval'], bricdef[len(bricdef) - 1].attrib['date']):
            continue
        #extracting bric attributes
        bricregion = dict(zip(['x', 'y', 'w', 'h'], bricdef.attrib['region'].split()))
        bricres = dict(zip(['w', 'h'], bric.attrib['resolution'].split()))
        # NOTE(review): `params` is built but never used (capture call below is commented out).
        params = {
            'width' : int(resolution['width']),
            'height': int(resolution['height']),
        }
        #captureurl.capture(vars['bricurl'], vars['bricpath'], params)
        #taking screenshot
        try:
            image = self.renderer.render(
                url = bricdef.attrib['url'],
                filename = vars['bricpath'],
                width = int(bricres['w']),
                height = int(bricres['h']),
                x = int(bricregion['x']),
                y = int(bricregion['y']),
                w = int(bricregion['w']),
                h = int(bricregion['h'])
            )
            #cropping
            #image = image.copy(int(vars['bricx']), int(vars['bricy']), int(vars['bricw']), int(vars['brich']))
            #image.save(vars['bricpath'], 'png')
            if os.path.exists(vars['bricpath']):
                logger.debug('generated %s' % vars['bricpath'])
            else:
                logger.error('failed to generate %s' % vars['bricpath'])
                continue
        except RuntimeError, e:
            logger.error(e.message)
            continue
        #updating brac and bric xml files
        snapshot_time = time.strftime('%Y-%m-%d %H:%M:%S')
        snapshot = et.Element('snapshot', {'revision': revision, 'date': snapshot_time})
        bricdef.append(snapshot)
        common.indent(bricdef)
        bricxml.write(os.path.join(newbricdir, 'bric.xml'))
        bric.attrib['revision'] = revision
        bric.attrib['lastupdate'] = snapshot_time
class BracSynchronizer(QtGui.QSystemTrayIcon):
    """
    System-tray application that periodically re-renders .brac archives.

    Keeps a JSON list of watched entries (files/directories) under the
    per-user application-support directory, polls it on two QTimers
    (modification check every 10 s, sync every 20 s), and repacks any
    .brac archive whose brics are due for a new snapshot.

    NOTE(review): Python 2 code (print statements, `has_key`,
    `except X, e`), PyQt4-style API.
    """
    #-------------------------------------------------------------------------------
    def __init__(self, icon, parent=None):
        """Build the tray menu, locate data paths, load entries and start timers."""
        QtGui.QSystemTrayIcon.__init__(self, icon, parent)
        logger.debug('Initializing')
        self.trayMenu = QtGui.QMenu(parent)
        self.actionStartStop = self.trayMenu.addAction("Start/Stop")
        self.actionSynchronize = self.trayMenu.addAction("Synchronize")
        self.actionSettings = self.trayMenu.addAction("Settings")
        self.actionExit = self.trayMenu.addAction("Exit")
        self.actionStartStop.triggered.connect(self.start)
        self.actionSynchronize.triggered.connect(self.syncBracs)
        self.actionSettings.triggered.connect(self.settings)
        self.actionExit.triggered.connect(self.exit)
        self.setContextMenu(self.trayMenu)
        # homedir: executable dir when frozen (PyInstaller-style), else the
        # parent of this source file's directory.
        if getattr(sys, 'frozen', False):
            self.homedir = os.path.dirname(sys.executable)
        elif __file__:
            file_path = os.path.dirname(__file__)
            self.homedir = os.path.abspath(os.path.join(file_path, os.path.pardir))
        # Per-user application data directory (Windows/macOS only —
        # NOTE(review): `p` is unbound on any other OS).
        if common.getos() == 'win':
            p = os.path.join(os.environ['APPDATA'], 'uofs/bric-a-brac')
        elif common.getos() == 'mac':
            p = os.path.join('/Users', os.environ['USER'])
            p = os.path.join(p, 'Library/Application Support')
            p = os.path.join(p, 'uofs/bric-a-brac')
        if not os.path.exists(p):
            os.makedirs(p)
        self.entries_path = os.path.join(p, 'bracList.json')
        self.loadEntries()
        self.renderer = WebkitRenderer()
        # Poll entry modifications every 10 s, run a sync pass every 20 s.
        mtimer = QtCore.QTimer(self)
        mtimer.timeout.connect(self.checkIfEntriesModified)
        mtimer.start(10000)
        stimer = QtCore.QTimer(self)
        stimer.timeout.connect(self.sync)
        stimer.start(20000)
    #-------------------------------------------------------------------------------
    def loadEntries(self):
        """Load self.entries from the JSON file; create an empty file if missing."""
        self.entries = []
        try:
            with open(self.entries_path, 'r') as f:
                self.entries = json.loads(f.read())
                f.close()
            logger.debug('loaded entries from %s' % self.entries_path)
        except ValueError as e:
            # Corrupt JSON: keep the empty entry list.
            logger.error('couldn\'t load entries file at %s' % self.entries_path)
            pass
        except IOError as e:
            # File absent: create an empty one for next time.
            logger.debug('couldn\'t load entries, creating initial entiries file at %s' % self.entries_path)
            f = open(self.entries_path, 'w')
            f.close()
    #-------------------------------------------------------------------------------
    def saveEntries(self):
        """Serialize self.entries to the JSON file (sorted keys, indent 2)."""
        out_str = json.dumps(self.entries, sort_keys = True, indent = 2)
        f = open(self.entries_path, 'w')
        f.write(out_str)
        f.close()
        logger.debug('saved entries to %s' % self.entries_path)
    #-------------------------------------------------------------------------------
    def checkIfEntriesModified(self):
        """Refresh the time table of the first entry whose files changed on disk."""
        logger.debug('checking if any entries modified')
        save = False
        toremove = []
        for e in self.entries:
            # Entry path gone, or its type no longer matches: mark for removal.
            if not os.path.exists(e['path']) \
               or (e['type'] == 'dir' and not os.path.isdir(e['path'])) \
               or (e['type'] == 'file' and not os.path.isfile(e['path'])):
                toremove.append(e['path'])
                continue
            do_update = False
            if os.path.getmtime(e['path']) != e['mtime']:
                do_update = True
            elif e['type'] == 'dir':
                # Any contained brac missing or touched?
                for b in e['bracList']:
                    if not os.path.isfile(b['path']) or os.path.getmtime(b['path']) != b['mtime']:
                        do_update = True
                        break
                # Any recorded subdirectory missing or touched?
                if not do_update and e.has_key('subdirs'):
                    for sd in e['subdirs']:
                        if not os.path.isdir(sd):
                            do_update = True
                            break
                        elif os.path.getmtime(sd) != e['subdirs'][sd]:
                            do_update = True
                            break
            if do_update:
                entryutils.updateEntryTimeTable(e)
                save = True
                # Only one entry is refreshed per tick.
                break
        #I guess it's better not to remove them
        #I'll leave it up to the user to clean up the entries.
        #if len(toremove) > 0:
        #    self.entries = [x for x in self.entries if x['path'] not in toremove]
        #    save = True
        if save:
            self.saveEntries()
    #-------------------------------------------------------------------------------
    def sync(self):
        """Timer slot: run a full sync pass, then reset the tray icon."""
        logger.debug('syncing entries')
        self.syncBracs()
        self.setStatus('off')
    #-------------------------------------------------------------------------------
    def setStatus(self, status, notice = None):
        """Switch the tray icon between idle ('off') and syncing ('on')."""
        #self.icon.setStatus(status)
        if status == "off":
            self.setIcon(QtGui.QIcon("resources/brac-16x16.png"))
        elif status == "on":
            self.setIcon(QtGui.QIcon("resources/brac-syncing-16x16.png"))
        #if status == "on" and not self.popup.opened():
        #    self.popup.show(notice)
    #-------------------------------------------------------------------------------
    def start(self):
        # NOTE(review): placeholder — Start/Stop is not implemented yet.
        print "again"
    #-------------------------------------------------------------------------------
    def settings(self):
        """Open the modal settings dialog."""
        settings_dlg = DialogSettings(self);
        settings_dlg.exec_();
    #-------------------------------------------------------------------------------
    def exit(self):
        """Quit the Qt application."""
        QtCore.QCoreApplication.instance().quit()
    #-------------------------------------------------------------------------------
    def setEntries(self, entries):
        """Replace the entry list, persist it, and trigger an immediate sync."""
        self.entries = entries
        self.saveEntries()
        self.syncBracs()
    #-------------------------------------------------------------------------------
    def needUpdate(self, timeinterval, lastupdate):
        """
        Return True when `lastupdate` ('%Y-%m-%d %H:%M:%S') plus the interval
        'W-D H:M:S' (weeks-days hours:minutes:seconds) lies in the past.
        A zero interval (next time == last update) also returns False.
        """
        lastupdate = time.strptime(lastupdate, '%Y-%m-%d %H:%M:%S')
        dt_lastupdate = dt.datetime.fromtimestamp(time.mktime(lastupdate))
        interval = dict(zip(['week', 'day', 'hour', 'minute', 'second'],
            [int(x) for x in timeinterval.replace('-', ' ').replace(':', ' ').split()]))
        dt_deltatime = dt.timedelta(weeks = interval['week'], days = interval['day'],
            hours = interval['hour'], minutes = interval['minute'],
            seconds = interval['second'])
        dt_nexttime = dt_lastupdate + dt_deltatime
        dt_curtime = dt.datetime.fromtimestamp(time.time())
        if dt_nexttime > dt_curtime or dt_nexttime == dt_lastupdate:
            return False
        return True
    #-------------------------------------------------------------------------------
    def syncBracs(self):
        """Walk every entry's bracList and sync each brac that is due."""
        for e in self.entries:
            if not e.get('bracList', False):
                continue
            toremove = []
            dirModified = False
            for b in e['bracList']:
                if not os.path.isfile(b['path']):
                    toremove.append(b['path'])
                    continue
                # File changed since last scan: rebuild its time table first.
                if os.path.getmtime(b['path']) != b['mtime']:
                    b['timetable'] = entryutils.getBracTimeTable(b['path'])
                    b['mtime'] = os.path.getmtime(b['path'])
                needupdate = False
                for bric in b['timetable']:
                    if self.needUpdate(bric['timeinterval'], bric['lastupdate']):
                        needupdate = True
                        break
                if needupdate:
                    dirModified = True
                    self.setStatus('on', 'Synchronizing!\n%s' % b['path'])
                    self.syncBrac(b['path'])
                    # syncBrac rewrote the archive: refresh table and mtime.
                    b['timetable'] = entryutils.getBracTimeTable(b['path'])
                    b['mtime'] = os.path.getmtime(b['path'])
            if dirModified:
                e['mtime'] = os.path.getmtime(e['path'])
            if len(toremove) > 0:
                e['bracList'] = [x for x in e['bracList'] if x['path'] not in toremove]
        self.saveEntries()
    #-------------------------------------------------------------------------------
    def syncBrac(self, path):
        """
        Re-render every outdated bric in the .brac archive at `path`, then
        repack the archive with the external 7za tool and clean up.

        Returns False if `path` is not a file, True on completion, None on
        early errors (missing tempdir or missing <layers> node).
        """
        logger.debug('syncing brac: %s' % path)
        if not os.path.isfile(path):
            return False
        tempdir = tempfile.mkdtemp()
        if not os.path.isdir(tempdir):
            logger.error('tempdir %s does not exists' % tempdir)
            return
        # Open the archive and pull out the master brac.xml definition.
        zf_brac = zipfile.ZipFile(path, 'a')
        zf_brac.extract('brac.xml', tempdir)
        bracxml = et.parse(os.path.join(tempdir, 'brac.xml'))
        bracdef = bracxml.getroot()
        layers = bracdef.find('layers');
        if layers == None:
            logger.error('brac %s has no layers node!' % path)
            return
        resolution = dict(zip(['width', 'height'], bracdef.attrib['resolution'].split()))
        # NOTE(review): `vars` shadows the builtin of the same name.
        vars = {
            'tools': os.path.join(self.homedir, 'tools'),
            'bracpath-brac': path,
            'bracname-brac': os.path.split(path)[1],
            'bracname-zip': "%s.%s.zip" % (os.path.split(path)[1], time.time()),
            'bracpath-zip': "%s.%s.zip" % (path, time.time()),
            'tempdir': tempdir,
        }
        for layer in layers:
            if layer.tag != 'bric':
                continue
            bric = layer
            #extracting bric files to its temp directory
            revision = str(int(bric.attrib['revision']) + 1)
            vars['bricid'] = bric.attrib['id']
            vars['bricdir'] = os.path.join(vars['tempdir'], r'bric.%s' % vars['bricid'])
            vars['bricpath'] = os.path.join(vars['bricdir'], r'%s.png' % revision)
            vars['bricdefpath'] = os.path.join(vars['bricdir'], 'bric.xml')
            bricid = bric.attrib['id']
            newbricdir = os.path.join(tempdir, r'bric.%s' % bricid)
            zf_brac.extract(r'bric.%s/bric.xml' % bricid, tempdir)
            bricxml = et.parse(os.path.join(newbricdir, 'bric.xml'))
            bricdef = bricxml.getroot()
            # Skip brics whose last snapshot (last child) is recent enough.
            if not self.needUpdate(bric.attrib['timeinterval'], bricdef[len(bricdef) - 1].attrib['date']):
                continue
            #extracting bric attributes
            bricregion = dict(zip(['x', 'y', 'w', 'h'], bricdef.attrib['region'].split()))
            bricres = dict(zip(['w', 'h'], bric.attrib['resolution'].split()))
            # NOTE(review): `params` is built but unused (capture call is commented out).
            params = {
                'width' : int(resolution['width']),
                'height': int(resolution['height']),
            }
            #captureurl.capture(vars['bricurl'], vars['bricpath'], params)
            #taking screenshot
            try:
                image = self.renderer.render(
                    url = bricdef.attrib['url'],
                    filename = vars['bricpath'],
                    width = int(bricres['w']),
                    height = int(bricres['h']),
                    x = int(bricregion['x']),
                    y = int(bricregion['y']),
                    w = int(bricregion['w']),
                    h = int(bricregion['h'])
                )
                #cropping
                #image = image.copy(int(vars['bricx']), int(vars['bricy']), int(vars['bricw']), int(vars['brich']))
                #image.save(vars['bricpath'], 'png')
                if os.path.exists(vars['bricpath']):
                    logger.debug('generated %s' % vars['bricpath'])
                else:
                    logger.error('failed to generate %s' % vars['bricpath'])
                    continue
            except RuntimeError, e:
                logger.error(e.message)
                continue
            #updating brac and bric xml files
            snapshot_time = time.strftime('%Y-%m-%d %H:%M:%S')
            snapshot = et.Element('snapshot', {'revision': revision, 'date': snapshot_time})
            bricdef.append(snapshot)
            common.indent(bricdef)
            bricxml.write(os.path.join(newbricdir, 'bric.xml'))
            bric.attrib['revision'] = revision
            bric.attrib['lastupdate'] = snapshot_time
        # Write the updated master definition and repack via external 7za:
        # rename aside, zip the tempdir contents, rename back.
        common.indent(bracdef)
        bracxml.write(os.path.join(tempdir, 'brac.xml'))
        zf_brac.close()
        if common.getos() == 'win':
            os.system('ren "%(bracpath-brac)s" "%(bracname-zip)s"' % vars)
            os.system('cd /d %(tools)s & 7za.exe a "%(bracpath-zip)s" "%(tempdir)s/*"' % vars)
            os.system('ren "%(bracpath-zip)s" "%(bracname-brac)s"' % vars)
        if common.getos() == 'mac':
            os.system('mv "%(bracpath-brac)s" "%(bracpath-zip)s"' % vars)
            os.system('cd %(tools)s ; ./7za a "%(bracpath-zip)s" "%(tempdir)s/*"' % vars)
            os.system('mv "%(bracpath-zip)s" "%(bracpath-brac)s"' % vars)
        shutil.rmtree(tempdir)
        print 'sync done'
        return True
def __repr__(self):
    """Describe this object by listing every variable from get_variables() as 'name: value'."""
    entries = ("{}: {}".format(key, val) for key, val in get_variables(self).items())
    header = "A " + type(self).__name__ + " filled with the data :\n"
    return header + '\n'.join(indent(entry) for entry in entries)
def write(self, fields, queries, js="-", js_class="DataStructure", **kwargs):
    """
    Emit a complete JavaScript data-structure implementation.

    Parameters:
        fields: mapping of record field name -> type; becomes the entry
            record's members and the per-field update methods.
        queries: query descriptors; each q provides q.name, q.vars and a
            q.impl code-generator backend (gen_query/gen_has_next/gen_next/
            gen_insert/gen_remove/gen_update/...).
        js: output path, or "-" for stdout (via open_maybe_stdout).
        js_class: name of the generated constructor function; the record
            type is named js_class + "Entry".

    Writes, in order: a usage-summary comment, the record type, auxiliary
    types, the constructor, size/add/remove, per-field and whole-record
    update methods, and one callback-style method per query (the callback
    returning truthy removes the current element in place).
    """
    with open_maybe_stdout(js) as f:
        writer = f.write
        RECORD_NAME = js_class + "Entry"
        # Usage-summary banner comment at the top of the generated file.
        writer("/*\n * USAGE SUMMARY\n")
        writer(" * initialization:\n * ds = new {}();\n".format(js_class))
        writer(" * get # of entries:\n * ds.size();\n")
        writer(" * add:\n * ds.add(new {}({}));\n".format(RECORD_NAME, ", ".join(f for f in fields)))
        writer(" * remove:\n * ds.remove(elem);\n")
        writer(" * update all fields:\n * ds.update(elem, {});\n".format(", ".join("new_{}".format(f) for f in fields)))
        for f in fields:
            writer(" * update {f}:\n * ds.update{F}(elem, new_{f});\n".format(f=f, F=capitalize(f)))
        writer(" * queries:\n")
        for q in queries:
            writer(" * ds.{n}({args}, function(elem) {{ ... }});\n".format(n=q.name, args=", ".join(a for a,t in q.vars)))
        writer(" * NOTE: Be careful not to add the same {} object more than once.\n".format(RECORD_NAME))
        writer(" * NOTE: Be careful not to remove or update an entry that is not in the data structure.\n")
        writer(" * NOTE: You may not make changes (add/remove/update) in query callbacks.\n")
        writer(" * NOTE: Elements can be removed in-place during iteration: if your query callback returns a truthy value, then the element is removed.\n")
        writer(" */\n\n\n")

        # record type (public fields + each query impl's private members)
        private_members = []
        for q in queries:
            private_members += list((f, ty.gen_type(self)) for f, ty in q.impl.private_members())
        _gen_record_type(RECORD_NAME, list(fields.items()), private_members, writer)

        # auxiliary type definitions (deduplicated via `seen`)
        seen = set()
        for q in queries:
            for t in q.impl.auxtypes():
                _gen_aux_type(t, self, writer, seen)

        this = TupleInstance("this")

        # constructor
        writer("function {}() {{\n".format(js_class))
        writer(indent(" ", "this.my_size = 0;\n"))
        for q in queries:
            writer(indent(" ", q.impl.construct(self, this)))
        writer("}\n")

        # get current size
        writer("{}.prototype.size = function() {{ return this.my_size; }};\n".format(js_class))

        # add routine
        writer("{}.prototype.add = function(x) {{\n".format(js_class))
        writer(" ++this.my_size;\n")
        for q in queries:
            writer(indent(" ", q.impl.gen_insert(self, "x", this)))
        writer("};\n")

        # remove routine
        writer("{}.prototype.remove = function(x) {{\n".format(js_class))
        writer(" --this.my_size;\n")
        for q in queries:
            writer(indent(" ", q.impl.gen_remove(self, "x", this)))
        writer("};\n")

        # update routines: one updateX per field (guarded by a changed check),
        # plus a whole-record update.
        for f, ty in fields.items():
            writer("{}.prototype.update{} = function(__x, new_val) {{\n".format(js_class, capitalize(f)))
            writer(" if ({} != new_val) {{\n".format(self.get_field("__x", f)))
            for q in queries:
                writer(indent(" ", q.impl.gen_update(self, fields, "__x", {f: "new_val"}, this)))
            writer(" {} = new_val;\n".format(self.get_field("__x", f)))
            writer(" }\n")
            writer(" }\n")
        writer("{}.prototype.update = function(__x, {}) {{\n".format(js_class, ", ".join(f for f, ty in fields.items())))
        for q in queries:
            writer(indent(" ", q.impl.gen_update(self, fields, "__x", {f:f for f in fields}, this)))
        for f, ty in fields.items():
            writer(" {} = {};\n".format(self.get_field("__x", f), f))
        writer(" }\n")

        # query routines: initialize iterator state, then loop
        # has_next/next, invoking the callback; a truthy callback result
        # removes the current element in place.
        for q in queries:
            writer("{}.prototype.{} = function({}, __callback) {{\n".format(js_class, q.name, ", ".join(v for v,ty in q.vars)))
            proc, stateExps = q.impl.gen_query(self, q.vars, this)
            writer(indent(" ", proc))
            state = q.impl.state()
            for (f, ty), e in zip(state, stateExps):
                writer(" var {} = {};\n".format(f, e))
            writer(" for (;;) {\n")
            proc, has_next = q.impl.gen_has_next(self, parent_structure=this, iterator=This())
            writer(indent(" ", proc))
            writer(" if (!({})) break;\n".format(has_next))
            proc, next = q.impl.gen_next(self, parent_structure=this, iterator=This())
            writer(indent(" ", proc))
            writer(" if (__callback({})) {{\n".format(next))
            proc, next = q.impl.gen_remove_in_place(self, parent_structure=this, iterator=This())
            writer(indent(" ", proc))
            writer(" }\n")
            writer(" }\n")
            writer(" }\n")
def add_footprint_variant(key: str, name: str, density_level: str, toe_extension: float) -> None:
    """Append one footprint variant to the enclosing package's `lines`.

    Emits the S-expression elements for a two-pad chip footprint: the
    rectangular SMT pads, documentation-layer outline polygons, optional
    silkscreen lines, a courtyard polygon and the NAME/VALUE labels.

    NOTE(review): this is a closure — it reads `config`, `lines`,
    `uuid_pads`, `_uuid`, `ff`, `get_by_density`, the line-width/offset
    constants etc. from the enclosing scope, so it only works where those
    are defined.

    :param key: unique suffix used to derive this variant's UUIDs
    :param name: human-readable footprint name
    :param density_level: IPC density level key passed to
        `get_by_density` (presumably 'A'/'B'/'C' — TODO confirm)
    :param toe_extension: extra toe length added to the IPC toe value
    """
    # Deterministic per-variant UUIDs for every child element.
    uuid_footprint = _uuid('footprint-{}'.format(key))
    uuid_text_name = _uuid('text-name-{}'.format(key))
    uuid_text_value = _uuid('text-value-{}'.format(key))
    uuid_silkscreen_top = _uuid('line-silkscreen-top-{}'.format(key))
    uuid_silkscreen_bot = _uuid('line-silkscreen-bot-{}'.format(key))
    uuid_courtyard = _uuid('polygon-courtyard-{}'.format(key))
    uuid_outline_top = _uuid('polygon-outline-top-{}'.format(key))
    uuid_outline_bot = _uuid('polygon-outline-bot-{}'.format(key))
    uuid_outline_left = _uuid('polygon-outline-left-{}'.format(key))
    uuid_outline_right = _uuid('polygon-outline-right-{}'.format(key))

    # Max boundary (grown as elements are emitted; feeds the courtyard).
    max_x = 0.0
    max_y = 0.0

    # Line width adjusted for size of element: large parts get the full
    # width, mid-size parts thin lines, tiny parts the thinnest doc lines.
    if config.length >= 2.0:
        silk_lw = line_width
        doc_lw = line_width
    elif config.length >= 1.0:
        silk_lw = line_width_thin
        doc_lw = line_width_thin
    else:
        silk_lw = line_width_thin
        doc_lw = line_width_thinner

    lines.append(' (footprint {}'.format(uuid_footprint))
    lines.append(' (name "{}")'.format(name))
    lines.append(' (description "")')

    # Pads
    for p in [0, 1]:
        # p=1 -> uuid_pads[0] on the left (sign -1); p=0 -> uuid_pads[-1]
        # (last entry) on the right. Relies on Python's negative indexing.
        pad_uuid = uuid_pads[p - 1]
        sign = -1 if p == 1 else 1
        # Note: We are using the gap from the actual resistors (Samsung), but calculate
        # the protrusion (toe and side) based on IPC7351.
        pad_width = config.width + get_by_density(config.length, density_level, 'side')
        pad_toe = get_by_density(config.length, density_level, 'toe') + toe_extension
        pad_length = (config.length - config.gap) / 2 + pad_toe
        dx = sign * (config.gap / 2 + pad_length / 2)  # x offset (delta-x)
        lines.append(' (pad {} (side top) (shape rect)'.format(pad_uuid))
        lines.append(' (position {} 0.0) (rotation 0.0) (size {} {}) (drill 0.0)'.format(
            ff(dx),
            ff(pad_length),
            ff(pad_width),
        ))
        max_x = max(max_x, pad_length / 2 + dx)
        lines.append(' )')

    # Documentation: filled outline polygons left/right of the gap, and
    # thin top/bottom edge lines across the gap.
    half_gap = ff(config.gap / 2)
    dx = ff(config.length / 2)  # NOTE: rebinds dx as a formatted string
    dy = ff(config.width / 2)
    lines.append(' (polygon {} (layer {})'.format(uuid_outline_left, 'top_documentation'))
    lines.append(' (width 0.0) (fill true) (grab_area true)')
    lines.append(' (vertex (position -{} {}) (angle 0.0))'.format(dx, dy))  # NW
    lines.append(' (vertex (position -{} {}) (angle 0.0))'.format(half_gap, dy))  # NE
    lines.append(' (vertex (position -{} -{}) (angle 0.0))'.format(half_gap, dy))  # SE
    lines.append(' (vertex (position -{} -{}) (angle 0.0))'.format(dx, dy))  # SW
    lines.append(' (vertex (position -{} {}) (angle 0.0))'.format(dx, dy))  # NW (close)
    lines.append(' )')
    lines.append(' (polygon {} (layer {})'.format(uuid_outline_right, 'top_documentation'))
    lines.append(' (width 0.0) (fill true) (grab_area true)')
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(dx, dy))  # NE
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(half_gap, dy))  # NW
    lines.append(' (vertex (position {} -{}) (angle 0.0))'.format(half_gap, dy))  # SW
    lines.append(' (vertex (position {} -{}) (angle 0.0))'.format(dx, dy))  # SE
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(dx, dy))  # NE (close)
    lines.append(' )')
    # Pull the edge lines in by half the line width so they stay inside.
    dy = ff(config.width / 2 - doc_lw / 2)
    lines.append(' (polygon {} (layer {})'.format(uuid_outline_top, 'top_documentation'))
    lines.append(' (width {}) (fill false) (grab_area true)'.format(doc_lw))
    lines.append(' (vertex (position -{} {}) (angle 0.0))'.format(half_gap, dy))
    lines.append(' (vertex (position {} {}) (angle 0.0))'.format(half_gap, dy))
    lines.append(' )')
    lines.append(' (polygon {} (layer {})'.format(uuid_outline_bot, 'top_documentation'))
    lines.append(' (width {}) (fill false) (grab_area true)'.format(doc_lw))
    lines.append(' (vertex (position -{} -{}) (angle 0.0))'.format(half_gap, dy))
    lines.append(' (vertex (position {} -{}) (angle 0.0))'.format(half_gap, dy))
    lines.append(' )')
    max_y = max(max_y, config.width / 2)

    # Silkscreen: only drawn for parts longer than 1.0 (too small otherwise);
    # kept clear of the pads via silkscreen_clearance.
    if config.length > 1.0:
        dx = ff(config.gap / 2 - silk_lw / 2 - silkscreen_clearance)
        dy = ff(config.width / 2 + silk_lw / 2)
        lines.append(' (polygon {} (layer {})'.format(uuid_silkscreen_top, 'top_placement'))
        lines.append(' (width {}) (fill false) (grab_area false)'.format(silk_lw))
        lines.append(' (vertex (position -{} {}) (angle 0.0))'.format(dx, dy))
        lines.append(' (vertex (position {} {}) (angle 0.0))'.format(dx, dy))
        lines.append(' )')
        lines.append(' (polygon {} (layer {})'.format(uuid_silkscreen_bot, 'top_placement'))
        lines.append(' (width {}) (fill false) (grab_area false)'.format(silk_lw))
        lines.append(' (vertex (position -{} -{}) (angle 0.0))'.format(dx, dy))
        lines.append(' (vertex (position {} -{}) (angle 0.0))'.format(dx, dy))
        lines.append(' )')
        max_y = max(max_y, config.width / 2 + silk_lw)

    # Courtyard: rectangle around everything emitted so far, grown by the
    # density-dependent excess.
    courtyard_excess = get_by_density(config.length, density_level, 'courtyard')
    lines.extend(indent(2, generate_courtyard(
        uuid=uuid_courtyard,
        max_x=max_x,
        max_y=max_y,
        excess_x=courtyard_excess,
        excess_y=courtyard_excess,
    )))

    # Labels: NAME above, VALUE below, offset past the body edge.
    if config.width < 2.0:
        offset = label_offset_thin
    else:
        offset = label_offset
    dy = ff(config.width / 2 + offset)  # y offset (delta-y)
    text_attrs = '(height {}) (stroke_width 0.2) ' \
                 '(letter_spacing auto) (line_spacing auto)'.format(pkg_text_height)
    lines.append(' (stroke_text {} (layer top_names)'.format(uuid_text_name))
    lines.append(' {}'.format(text_attrs))
    lines.append(' (align center bottom) (position 0.0 {}) (rotation 0.0)'.format(dy))
    lines.append(' (auto_rotate true) (mirror false) (value "{{NAME}}")')
    lines.append(' )')
    lines.append(' (stroke_text {} (layer top_values)'.format(uuid_text_value))
    lines.append(' {}'.format(text_attrs))
    lines.append(' (align center top) (position 0.0 -{}) (rotation 0.0)'.format(dy))
    lines.append(' (auto_rotate true) (mirror false) (value "{{VALUE}}")')
    lines.append(' )')
    lines.append(' )')