Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00
			
		
		
		
	Improve Pan language support (#3691)
* Add a larger set of sample files for Pan

  This is a fairly good cross section of Pan based on code from:
  * https://github.com/quattor/template-library-examples
  * https://github.com/quattor/template-library-core

* Add Pan language grammar
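For readers who have not seen Pan before, here is a minimal illustrative template. It is not taken from this commit; the template name, paths, and variable are invented for the example. It shows the kinds of constructs (template declarations, variables, path assignments, dicts, and foreach loops) that the new samples exercise and that the source.pan grammar highlights.

# Hypothetical Pan template, for illustration only; not part of this commit.
object template example;

# A site-tunable variable with a default value.
variable DATA_DISKS ?= list('sdb', 'sdc');

# Simple path assignment.
'/system/example/ndisks' = length(DATA_DISKS);

# Build a dict keyed by disk name using a foreach loop.
'/system/example/disks' = {
    d = dict();
    foreach (idx; disk; DATA_DISKS) {
        d[disk] = dict('capacity_gb', 10);
    };
    d;
};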
Committed by: Paul Chaignon
Parent: e9ec699931
Commit: 8d178bfaed
				
			
							
								
								
									
.gitmodules (vendored, 3 additions)
									
									
								
@@ -860,3 +860,6 @@
 [submodule "vendor/grammars/sublime-fantom"]
 	path = vendor/grammars/sublime-fantom
 	url = https://github.com/rkoeninger/sublime-fantom
+[submodule "vendor/grammars/language-pan"]
+	path = vendor/grammars/language-pan
+	url = https://github.com/quattor/language-pan
grammars.yml (2 additions)

@@ -427,6 +427,8 @@ vendor/grammars/language-ncl:
 - source.ncl
 vendor/grammars/language-ninja:
 - source.ninja
+vendor/grammars/language-pan:
+- source.pan
 vendor/grammars/language-povray:
 - source.pov-ray sdl
 vendor/grammars/language-regexp:
languages.yml (1 change)

@@ -3130,7 +3130,7 @@ Pan:
   color: "#cc0000"
   extensions:
   - ".pan"
-  tm_scope: none
+  tm_scope: source.pan
   ace_mode: text
   language_id: 276
 Papyrus:
							
								
								
									
samples/Pan/ceph-raid.pan (new file, 59 lines)
									
								
@@ -0,0 +1,59 @@
unique template site/filesystems/ceph-raid;

prefix '/system/blockdevices';

variable CEPH_OSD_DISKS = {
    # SAS disks partitions
    disks = list();
    foreach (disk; data; value('/hardware/harddisks')) {
        if (data['capacity'] > 1000 * GB) {
            append(disks, disk);
        };
    };
    disks;
};

'partitions' = {
    foreach (idx; disk; CEPH_OSD_DISKS) {
        partitions_add(
            disk, dict(
                format('%s1', disk), 10 * GB,
                format('%s2', disk), 5 * GB,
                format('%s3', disk), -1));
        SELF[format('%s1', disk)]['offset'] = 1;
    };
    SELF;
};

#raid for data
'md' = {
    for (i = 0; i < length(CEPH_OSD_DISKS); i = i + 2) {
        for (j = 2; j <= 3; j = j + 1) {
            SELF[escape(format('md/%s0%s%d', CEPH_OSD_DISKS[i], CEPH_OSD_DISKS[i+1], j ))] = dict(
                "device_list", list(format('partitions/%s%d', CEPH_OSD_DISKS[i], j), format('partitions/%s%d', CEPH_OSD_DISKS[i+1], j)),
                "raid_level", 'RAID0',
                "metadata", '1.2',
            );
        };
    };
    SELF;
};

# ceph OSD and journal fs
'/system/filesystems' = {
    # ga over software raids..
    foreach (disk; data; value('/system/blockdevices/md')) { #check for data part/disk
        if (match(unescape(disk), '^md/.+0.+3$')) {
            append(merge(CEPH_FSOPTS_BASE, CEPH_DISK_OPTIONS[CEPH_FS], dict(
                'mountpoint', format('/var/lib/ceph/osd/%s', replace('md/([a-z0A-Z]+)[0-9]*$', '$1', unescape(disk))),
                'block_device', format('md/%s', disk),
            )));
        } else if (match(unescape(disk), '^md/.+0.+2$')) {
            append(merge(CEPH_FSOPTS_DUMMY, dict(
                'mountpoint', format('/dummy/%s', unescape(disk)),
                'block_device', format('md/%s', disk)
            )));
        };
    };
    SELF;
};
							
								
								
									
samples/Pan/cluster-A.pan (new file, 11 lines)
									
								
@@ -0,0 +1,11 @@
structure template site/nagios/hosts/cluster-A;

# let Nagios server A monitor B
# just an example to make the templates compile
"nagios-slave-B.example.org" = create (NAGIOS_QUATTOR_HOST);
"nagios-slave-B.example.org/alias" = "slave B";
"nagios-slave-B.example.org/hostgroups" = list( "quattor-nodes" );


# "another-host-in-A.example.org" = create (NAGIOS_QUATTOR_HOST);
# "another-host-in-A.example.org/alias" = "another monitored host in cluster A";
							
								
								
									
samples/Pan/databases.pan (new file, 18 lines)
									
								
@@ -0,0 +1,18 @@
template site/databases;

# Defines the mapping between the full hostname and the IP
# address.
final variable DB_IP = dict(
    escape("one"), "192.168.0.24",
    escape("hyp01"), "192.168.0.25",
    escape("vm"), "192.168.0.26",
);

# Defines the mapping between the full hostname and the
# physical machine.
# A different hardware template must be used for each machine
final variable DB_MACHINE = dict(
    escape("one"), "hardware/machine/ibm/x3550/x_KDXXXX",
    escape("hyp01"), "hardware/machine/ibm/hs21xm/blade_99HXXXX",
    escape("vm"), "hardware/machine/one/example",
);
							
								
								
									
samples/Pan/functions.pan (new file, 56 lines)
									
								
@@ -0,0 +1,56 @@
################################################################################
# This is 'namespaces/standard/pan/functions.tpl', a pan-templates's file
################################################################################
#
# VERSION:    3.2.7, 21/08/09 22:22
# AUTHOR:     Martin Bock
# MAINTAINER: Example Maintainer <support@example.org>
# LICENSE:    http://cern.ch/eu-datagrid/license.html
#
################################################################################
# Coding style: emulate <TAB> characters with 4 spaces, thanks!
################################################################################
#
# Function definitions
#
################################################################################

declaration template pan/functions;

include 'pan/types';

############################################################
##=
## @function push
## @# push zero or more values onto the end of a list.
##+If the list does not exist or is not defined a new list is
##+created.
## @syntax value:element
## @param:value... the values to push onto list
## @example
##+# "/data" will contain list (1,2,3,4)
##+"/data" = list(1,2);
##+"/data" = push(3,4);
##=
############################################################
function push = {
    # Get the reference to SELF or create an empty list
    # as necessary.
    if (exists(SELF) && is_list(SELF)) {
        v = SELF;
    } else if (!exists(SELF) || !is_defined(SELF)) {
        v = list();
    } else {
        error("push can only be applied to a list");
    };

    # Merge the arguments into the given array.  Neither the
    # first/next or merge functions can be used because the
    # ARGV array cannot be directly referenced.
    i = 0;
    while (i < ARGC) {
        v[length(v)] = ARGV[i];
        i = i + 1;
    };
    v;
};
							
								
								
									
samples/Pan/infernalis.pan (new file, 22 lines)
									
								
@@ -0,0 +1,22 @@
unique template site/ceph/server/infernalis;

include 'components/dirperm/config';

"/software/components/dirperm/paths" = {
    foreach (idx; mp; value('/system/filesystems')) {
        if (match(mp['mountpoint'], format('^%s', CEPH_OSD_MP_BASE))) {
            append(SELF, dict(
                "path", mp['mountpoint'],
                "owner", "ceph:ceph",
                "perm", "0755",
                "type", "d",
            ));
        };
    };
    SELF;
};

include 'common/sysctl/service';
prefix "/software/components/metaconfig/services/{/etc/sysctl.conf}/contents";

'kernel.pid_max' = 4194303;
							
								
								
									
samples/Pan/libvirt.pan (new file, 20 lines)
									
								
@@ -0,0 +1,20 @@
unique template site/ceph/client/libvirt;

include 'site/ceph/client/config';

variable CEPH_LIBVIRT_USER ?= 'oneadmin';
variable CEPH_LIBVIRT_GROUP ?= CEPH_LIBVIRT_USER;
prefix '/software/components/metaconfig/services/{/etc/ceph/ceph.client.libvirt.keyring}';

"contents" = if (is_defined(CEPH_LIBVIRT_SECRET)) {
    dict("client.libvirt", dict(
        "key", CEPH_LIBVIRT_SECRET,
        )
    );
} else {
    dict();
};
'module' = 'tiny';
'mode' = 0600;
'owner' = CEPH_LIBVIRT_USER;
'group' = CEPH_LIBVIRT_GROUP;
							
								
								
									
samples/Pan/link.pan (new file, 19 lines)
									
								
@@ -0,0 +1,19 @@
unique template site/dcache/link;

include 'components/dcache/config';

## links
## default preference value
"/software/components/dcache/link/def_pref" = "10";
## list of links that will be ignored during configuration
"/software/components/dcache/link/ignore_link" = list();
##
"/software/components/dcache/link/links" = dict(
    ## out_buf_write: all outside to write to the storage through this buffer
    "out", dict("ugroup", list("all_net", "any_store"), "pgroup", list("out_buf"), "read", "10", "write", "10", "cache", "10"),
    "in", dict("ugroup", list("in_net", "any_store"), "pgroup", list("priv"), "read", "20", "write", "20", "cache", "20"),
    "dteam", dict("ugroup", list("dteam_store"), "pgroup", list("out_buf"), "read", "10", "write", "10", "cache", "10"),
    "ops", dict("ugroup", list("ops_store"), "pgroup", list("out_buf"), "read", "10", "write", "10", "cache", "10"),
    "cms", dict("ugroup", list("cms_store"), "pgroup", list("out_buf"), "read", "10", "write", "10", "cache", "10"),
    "test", dict("ugroup", list("test_store"), "pgroup", list("behar_test"), "read", "10", "write", "10", "cache", "10"),
);
							
								
								
									
samples/Pan/mysql.pan (new file, 29 lines)
									
								
@@ -0,0 +1,29 @@
unique template common/opennebula/mysql;

prefix "/software/packages";
"{mysql-server}" = dict();

include 'components/mysql/config';

prefix  "/software/components/mysql";
"serviceName" = {
    if (RPM_BASE_FLAVOUR_VERSIONID == 7) {
        "mariadb";
    } else {
        "mysqld";
    };
};
prefix "/software/components/mysql/servers/one";
"host" = FULL_HOSTNAME; # localhost is added by component
"adminpwd" = OPENNEBULA_MYSQL_ADMIN;
"adminuser" = "root";

prefix "/software/components/mysql/databases/opennebula";
"server" = "one";
"users/oneadmin/password" = OPENNEBULA_MYSQL_ONEADMIN;
"users/oneadmin/rights" = list("ALL PRIVILEGES");
"createDb" = false; # if false, run script
"initScript/file" = "/dev/null";

prefix "/software/components/chkconfig/service";
"mysqld" = dict("on", "", "startstop", true);
							
								
								
									
samples/Pan/nodes_properties.pan (new file, 18 lines)
									
								
@@ -0,0 +1,18 @@
template config/nodes_properties;

variable SITES ?= list('example');

#variable NEW_NODES_PROPS ?= {

variable NODES_PROPS = {
    nodes_add = dict();
    nodes_props = dict();
    allsites = SITES;
    ok = first(allsites, k, v);
    while (ok) {
        nodes_add = merge(create(format("config/%s_nodes_properties", v)), nodes_props);
        nodes_props = merge(nodes_add[v], nodes_props);
        ok = next(allsites, k, v);
    };
    nodes_props;
};
							
								
								
									
samples/Pan/onevm.pan (new file, 14 lines)
									
								
@@ -0,0 +1,14 @@
unique template site/one/onevm;

include 'components/chkconfig/config';

# set opennebula map
include 'quattor/aii/opennebula/schema';
bind "/system/opennebula" = opennebula_vmtemplate;

include 'site/config-vm';

include 'quattor/aii/opennebula/default';

"/software/packages/{acpid}" = dict();
"/software/components/chkconfig/service/acpid" = dict('on', '', 'startstop', true);
							
								
								
									
samples/Pan/osd-fetch.pan (new file, 26 lines)
									
								
@@ -0,0 +1,26 @@
unique template site/ceph/osdschemas/osd-fetch;

prefix '/software/components/ceph/clusters/ceph';

variable FETCHED_OSDS = {
    t = dict();
    rep = 2;
    foreach(idx; host; CEPH_NODES) {
        prof = replace('.data$', '.os', host);
        d = value(format('%s:/software/components/ceph/localdaemons/osds', prof));
        t[shorten_fqdn(host)] = dict(
            'fqdn', host,
            'osds', d
        );

        numosd = length(d);
        if (numosd > rep){
            rep = numosd;
        };
    };
    all = dict('osdhosts', t, 'maxosd', rep);
};

'osdhosts' = FETCHED_OSDS['osdhosts'];
variable CEPH_OSD_DOWN_REPORTERS ?= FETCHED_OSDS['maxosd'] + 2;
variable CEPH_OSD_DOWN_REPORTS ?=  CEPH_OSD_DOWN_REPORTERS + CEPH_OSD_DOWN_REPORTERS / 4 + 1;
							
								
								
									
samples/Pan/pakiti.pan (new file, 45 lines)
									
								
@@ -0,0 +1,45 @@
#
# Generated by RepositoryTask on 12/09/13 15:41
#
# name = pakiti
# owner = support@example.org
# url = http://quattor.web.lal.in2p3.fr/packages/pakiti
#

structure template repository/pakiti;

"name" = "pakiti";
"owner" = "support@example.org";
"protocols" = list(
    dict("name", "http",
    "url", "http://quattor.web.lal.in2p3.fr/packages/pakiti")
);

"contents" = dict(
    # pkg = pakiti-client-2.1.4-1-noarch
escape("pakiti-client-2.1.4-1-noarch"), dict("name", "pakiti-client", "version", "2.1.4-1", "arch", "noarch"),
    # pkg = pakiti-client-2.1.4-2-noarch
escape("pakiti-client-2.1.4-2-noarch"), dict("name", "pakiti-client", "version", "2.1.4-2", "arch", "noarch"),
    # pkg = pakiti-client-2.1.4-3-noarch
escape("pakiti-client-2.1.4-3-noarch"), dict("name", "pakiti-client", "version", "2.1.4-3", "arch", "noarch"),
    # pkg = pakiti-client-2.1.4-4-noarch
escape("pakiti-client-2.1.4-4-noarch"), dict("name", "pakiti-client", "version", "2.1.4-4", "arch", "noarch"),
    # pkg = pakiti-client-2.1.5-0-noarch
escape("pakiti-client-2.1.5-0-noarch"), dict("name", "pakiti-client", "version", "2.1.5-0", "arch", "noarch"),
    # pkg = pakiti-client-manual-2.1.4-2-noarch
escape("pakiti-client-manual-2.1.4-2-noarch"), dict("name", "pakiti-client-manual", "version", "2.1.4-2", "arch", "noarch"),
    # pkg = pakiti-client-manual-2.1.4-3-noarch
escape("pakiti-client-manual-2.1.4-3-noarch"), dict("name", "pakiti-client-manual", "version", "2.1.4-3", "arch", "noarch"),
    # pkg = pakiti-client-manual-2.1.4-4-noarch
escape("pakiti-client-manual-2.1.4-4-noarch"), dict("name", "pakiti-client-manual", "version", "2.1.4-4", "arch", "noarch"),
    # pkg = pakiti-server-2.1.4-1-noarch
escape("pakiti-server-2.1.4-1-noarch"), dict("name", "pakiti-server", "version", "2.1.4-1", "arch", "noarch"),
    # pkg = pakiti-server-2.1.4-2-noarch
escape("pakiti-server-2.1.4-2-noarch"), dict("name", "pakiti-server", "version", "2.1.4-2", "arch", "noarch"),
    # pkg = pakiti-server-2.1.4-3-noarch
escape("pakiti-server-2.1.4-3-noarch"), dict("name", "pakiti-server", "version", "2.1.4-3", "arch", "noarch"),
    # pkg = pakiti-server-2.1.4-4-noarch
escape("pakiti-server-2.1.4-4-noarch"), dict("name", "pakiti-server", "version", "2.1.4-4", "arch", "noarch"),
    # pkg = pakiti-server-2.1.5-0-noarch
escape("pakiti-server-2.1.5-0-noarch"), dict("name", "pakiti-server", "version", "2.1.5-0", "arch", "noarch"),
);
							
								
								
									
samples/Pan/purge_fqan_accounts.pan (new file, 30 lines)
									
								
@@ -0,0 +1,30 @@
# Template installing a script to remove all accounts with 'fqan' in
# their name. Used after fixing VOConfigTask in SCDB 2.3.2 to remove
# obsolete accounts not removed by ncm-accounts.
#
# The script is added and executed only on nodes where NODE_VO_ACCOUNTS
# is true. It is intended to be run as GLITE_BASE_CONFIG_SITE (define
# this variable to the script namespace).
#
# Michel Jouvin - 13/9/09

unique template site/misc/purge_fqan_accounts;

variable LAL_PURGE_ACCOUNTS_SCRIPT = '/tmp/purge_fqan_accounts';

include 'components/filecopy/config';

'/software/components/filecopy/services' = {
    if ( is_defined(NODE_VO_ACCOUNTS) && NODE_VO_ACCOUNTS ) {
        debug('Adding purge_fqan_accounts');
        SELF[escape(LAL_PURGE_ACCOUNTS_SCRIPT)] = dict(
            'config', file_contents('site/misc/purge_fqan_accounts.sh'),
            'owner', 'root:root',
            'perms', '0755',
            'restart', LAL_PURGE_ACCOUNTS_SCRIPT,
        );
    } else {
        debug(format('VO accounts disabled (NODE_VO_ACCOUNTS=%s', NODE_VO_ACCOUNTS));
    };
    SELF;
};
							
								
								
									
samples/Pan/resources.pan (new file, 30 lines)
									
								
@@ -0,0 +1,30 @@
unique template site/one/resources;

# datastores templates
prefix "/software/components/opennebula/datastores/0";
"name" = "ceph.example";
"bridge_list" = list(FULL_HOSTNAME); # for now, do this from the headnode
"ceph_host" = CEPH_MON_HOSTS;
"ceph_secret" = CEPH_LIBVIRT_UUID;
"ceph_user" = "libvirt";
"ceph_user_key" = CEPH_LIBVIRT_SECRET;
"datastore_capacity_check" = true;
"pool_name" = "one";
"type" = "IMAGE_DS";
"rbd_format" = 2;

prefix "/software/components/opennebula/datastores/1";
"name" = "nfs.example";
"datastore_capacity_check" = true;
"ds_mad" = "fs";
"tm_mad" = "shared";
"type" = "IMAGE_DS";

# untouchables resources
prefix "/software/components/opennebula/untouchables";
"datastores" = list('system');

# extra conf
prefix "/software/components/opennebula";
"ssh_multiplex" = true;
"tm_system_ds" = "ssh";
							
								
								
									
samples/Pan/simple.pan (new file, 20 lines)
									
								
@@ -0,0 +1,20 @@
unique template site/ceph/osdlocal/simple;

variable CEPH_JOURNAL_PART ?= dict();

prefix '/software/components/ceph';

'localdaemons/osds' = {
    d = dict();
    foreach(idx; osdmnt; value('/system/filesystems')) {
        part = osdmnt['block_device'];
        disk = replace('\S+/([a-zA-Z]+)[0-9]*$', '$1', part);
        if (match(osdmnt['mountpoint'], '/var/lib/ceph/osd/\w+')){
            d[escape(osdmnt['mountpoint'])] = dict(
                'journal_path', format('/dev/%s%d', disk, CEPH_JOURNAL_PART['data']),
                'crush_weight', weight_of(part),
            );
        };
    };
    d;
};
							
								
								
									
samples/Pan/types.pan (new file, 151 lines)
									
								
@@ -0,0 +1,151 @@
@contributor{
  name = First Contributor
  email = first@example.org
}
@contributor{
  name = Second Contributor
  email = second@example.org
}
@documentation{
Data type and function definitions for basic types
}

declaration template pan/types;

include 'pan/legacy';

@documentation{
This type implements a date/time format consistent with
ASN.1 typically used by LDAP.  The actual specification is the
"GeneralizedTime" format as specified on page 38 of the X.208
ITU-T recommendation and references within.

Ex: 20040825120123Z
    20040825120123+0100
    20040825120123,5
    20040825120123.5
    20040825120123.5-0123
}
function is_asndate = {
    # Check cardinality and type of argument.
    if (ARGC != 1 || !is_string(ARGV[0]))
        error("usage: is_asndate(string)");

    # Match the datetime pattern, extracting interesting fields.
    result = matches(ARGV[0],
        '^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(?:[,\.](\d+))?([Zz]|(?:[-+]\d{2}\d{2}))?$');

    if (length(result) >= 7) {
        # Do further tests on various components of the date.
        # NOTE: the to_long(to_double(x)) construct below is to avoid having
        # the to_long function treat strings with leading zeros as octal
        # numbers.  E.g. to_long("09") will throw an exception because '9' is
        # not a valid octal digit.
        year = to_long(result[1]);
        month = to_long(to_double(result[2]));
        day = to_long(to_double(result[3]));
        hour = to_long(to_double(result[4]));
        minute = to_long(to_double(result[5]));
        second = to_long(to_double(result[6]));

        frac = 0;
        if (length(result) > 7) {
            frac = to_long(to_double(result[7]));
        };

        zone = '+0000';
        if (length(result) > 8) {
            zone = result[8];
        };

        # Check the range of months.
        if (month < 1 || month > 12) {
            error("is_asndate: invalid month");
            return(false);
        };

        # Check the range of days.
        if (day < 1 || day > 31) {
            error("is_asndate: invalid day");
            return(false);
        };

        # Be more specific on the days in each month.
        if (month == 4 || month == 6 || month == 9 || month == 11) {
            if (day > 30) {
                error("is_asndate: invalid day");
            };
        };

        # February is always a bother.  Too lazy to check that the leap
        # years have been specified correctly.
        if (month == 2 && day > 29) {
            error("is_asndate: invalid day");
        };

        # Check the time.
        if (hour > 23) {
            error("is_asndate: invalid hour");
            return(false);
        };
        if (minute > 59) {
            error("is_asndate: invalid minute");
            return(false);
        };

        # Allow for leap seconds here (since it is easy).
        if (second > 60) {
            error("is_asndate: invalid minute");
            return(false);
        };

        # Check the time zone format.
        if (zone != "Z" && zone != "z") {
            tz = matches(zone, '^[-+](\d{2})(\d{2})$');

            hoffset = to_long(to_double(tz[1]));
            moffset = to_long(to_double(tz[2]));

            if (hoffset >= 12) {
                error("is_asndate: invalid hour offset in time zone");
                return(false);
            };
            if (moffset > 59) {
                error("is_asndate: invalid minute offset in time zone");
                return(false);
            };
        };

    } else {
        error("is_asndate: invalid format for time");
        return(false);
    };

    # If it gets to this point, then the date must be OK.
    true;
};


type type_asndate = string with {
    is_asndate(SELF);
};

@documentation{
    desc = Type that enforces the existence of a named interface.
}
type valid_interface = string with {
    if (exists(format('/system/network/interfaces/%s', SELF))) {
        return(true);
    };
    foreach(ifc; attr; value('/system/network/interfaces')) {
        if (attr['device'] == SELF){
            return(true);
        };
    };
    false;
};

@documentation{
    desc = CPU architectures understood by Quattor
}
type cpu_architecture = string with match (SELF, '^(i386|ia64|x86_64|sparc|aarch64|ppc64(le)?)$');
							
								
								
									
samples/Pan/unit.pan (new file, 32 lines)
									
								
@@ -0,0 +1,32 @@
unique template site/dcache/unit;

include 'components/dcache/config';

## unit/ugroups
## list of ugroups that will be ignored during configuration
"/software/components/dcache/unit/ignore_ugroup" = list();
"/software/components/dcache/unit/units" = dict(
    "protocol", list(
        dict("cond", "*/*", "ugroup", list("default_protocol"))
    ),
    "net", list(
        dict("cond", "192.168.0.0/255.255.0.0", "ugroup", list("in_net", "all_net")),
        dict("cond", "192.168.10.0/255.255.255.0", "ugroup", list("in_server", "in_net", "all_net")),
        dict("cond", "192.168.11.0/255.255.255.0", "ugroup", list("in_wn", "in_net", "all_net")),
        dict("cond", "192.168.12.0/255.255.255.0", "ugroup", list("in_wn", "in_net", "all_net")),
        dict("cond", "192.168.13.0/255.255.255.0", "ugroup", list("in_wn", "in_net", "all_net")),
        dict("cond", "192.168.14.0/255.255.255.0", "ugroup", list("in_wn", "in_net", "all_net")),
        dict("cond", "192.168.15.0/255.255.255.0", "ugroup", list("in_wn", "in_net", "all_net")),
        dict("cond", "192.168.16.0/255.255.255.0", "ugroup", list("in_wn", "in_net", "all_net")),
        dict("cond", "192.168.17.0/255.255.255.0", "ugroup", list("in_wn", "in_net", "all_net")),
        dict("cond", "0.0.0.0/0.0.0.0", "ugroup", list("all_net")),
        ),
    "store", list(
        dict("cond", "*@*", "ugroup", list("any_store")),
        dict("cond", "myStore:STRING@osm", "ugroup", list("default_store", "any_store")),
        dict("cond", "dteam:dteam-base@osm", "ugroup", list("dteam_store", "any_store")),
        dict("cond", "ops:ops-base@osm", "ugroup", list("ops_store", "any_store")),
        dict("cond", "cms:cms-base@osm", "ugroup", list("cms_store", "any_store")),
        dict("cond", "test:cms-test@osm", "ugroup", list("test_store")),
    ),
);
							
								
								
									
vendor/README.md (vendored, 1 addition)
									
									
								
@@ -250,6 +250,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
 - **Ox:** [andreashetland/sublime-text-ox](https://github.com/andreashetland/sublime-text-ox)
 - **Oz:** [eregon/oz-tmbundle](https://github.com/eregon/oz-tmbundle)
 - **P4:** [TakeshiTseng/atom-language-p4](https://github.com/TakeshiTseng/atom-language-p4)
+- **Pan:** [quattor/language-pan](https://github.com/quattor/language-pan)
 - **Papyrus:** [Kapiainen/SublimePapyrus](https://github.com/Kapiainen/SublimePapyrus)
 - **Parrot Internal Representation:** [textmate/parrot.tmbundle](https://github.com/textmate/parrot.tmbundle)
 - **Pascal:** [textmate/pascal.tmbundle](https://github.com/textmate/pascal.tmbundle)
							
								
								
									
vendor/grammars/language-pan (vendored submodule, 1 addition)
									
								
Submodule vendor/grammars/language-pan added at 2c7e36e993
								
								
									
vendor/licenses/grammar/language-pan.txt (vendored, new file, 38 lines)
									
								
@@ -0,0 +1,38 @@
---
type: grammar
name: language-pan
license: mit
---
Copyright (c) 2014 GitHub Inc.

Copyright (c) 2017 Science & Technology Facilities Council

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

--------------------------------------------------------------------

This package was derived from a TextMate bundle located at
https://github.com/textmate/shellscript.tmbundle and distributed under the
following license, located in `README.mdown`:

Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.