3 # Copyright (C) 2000-2013 PLD-Team <feedback@pld-linux.org>
5 # Sebastian Zagrodzki <zagrodzki@pld-linux.org>
6 # Jacek Konieczny <jajcus@pld-linux.org>
7 # Andrzej Krzysztofowicz <ankry@pld-linux.org>
8 # Jakub Bogusz <qboosh@pld-linux.org>
9 # Elan Ruusamäe <glen@pld-linux.org>
11 # See cvs log pldnotify.awk for list of contributors
14 # - "SourceXDownload" support (use given URLs if present instead of cut-down SourceX URLs)
15 # - "SourceXActiveFTP" support
16 # - support debian/watch http://wiki.debian.org/debian/watch/
19 # to test this, run:
20 # $ awk -vDEBUG=1 -f pldnotify.awk < specfile
22 # To get the most out of it, you need to have the following tools installed:
23 # - perl, sed, wget, coreutils, util-linux
24 # - perl-HTML-Tree (HTML::TreeBuilder module) for better links parser (-vUSE_PERL=0 to disable)
25 # - pear (php-pear-PEAR) for php-pear package updates
26 # - npm for nodejs packages
27 # - gem (ruby-rubygems) for ruby/rubygem packages
28 # - node to parse json from release-monitoring.org
30 # Additionally "mirrors" file in current dir, controls local mirrors you prefer
37 # print strftime("%Y-%m-%d %H:%M:%S ") s >> "/dev/stderr"
38 print s >> "/dev/stderr"
41 function fixedsub(s1,s2,t, ind) {
42 # substitutes fixed strings (not regexps)
# Replaces the first occurrence of the *literal* string s1 in t with s2;
# unlike sub()/gsub(), s1 is never interpreted as a regular expression.
# `ind` is an AWK-convention local (extra formal parameter).
# NOTE(review): return statement not visible in this excerpt -- presumably
# returns the (possibly modified) t; confirm against the full source.
43 if (ind = index(t,s1)) {
# Assignment inside the condition is intentional: index() returns 0
# (false) when s1 is absent, so t is rebuilt only on a match.
44 t = substr(t, 1, ind-1) s2 substr(t, ind+length(s1))
# Inside ispre(s) (function header elided in this excerpt): heuristic
# pre-release detector -- any of pre/beta/alpha/rc in either case marks
# the version component as a pre-release.
50 if ((s~"pre")||(s~"PRE")||(s~"beta")||(s~"BETA")||(s~"alpha")||(s~"ALPHA")||(s~"rc")||(s~"RC")) {
58 function compare_ver(v1,v2) {
59 # compares version numbers
# Per call sites (process_source/process_data), returns 1 when v2 is
# considered newer than v1.  Several return/assignment lines are elided
# in this excerpt -- TODO confirm exact return values in the full source.
# Step 1: insert a dot at every letter<->digit boundary so e.g.
# "1a2" becomes "1.a.2" before splitting into components.
60 while (match(v1,/[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
61 v1=(substr(v1,1,RSTART) "." substr(v1,RSTART+RLENGTH-1))
62 while (match(v2,/[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
63 v2=(substr(v2,1,RSTART) "." substr(v2,RSTART+RLENGTH-1))
# Step 2: split into dot-separated component arrays.
70 count=split(v1,v1a,"\.")
71 count2=split(v2,v2a,"\.")
# Compare pairwise only up to the shorter component list.
73 if (count<count2) mincount=count
76 for (i=1; i<=mincount; i++) {
# Empty components compare as 0.
77 if (v1a[i]=="") v1a[i]=0
78 if (v2a[i]=="") v2a[i]=0
# Both numeric: a longer digit string is a larger number; equal
# lengths fall through to the (string) > comparison.
82 if ((v1a[i]~/[0-9]/)&&(v2a[i]~/[0-9]/)) {
83 if (length(v2a[i])>length(v1a[i]))
85 else if (v2a[i]>v1a[i])
87 else if (length(v1a[i])>length(v2a[i]))
89 else if (v1a[i]>v2a[i])
# Both alphabetic: plain string comparison.
91 } else if ((v1a[i]~/[A-Za-z]/)&&(v2a[i]~/[A-Za-z]/)) {
94 else if (v1a[i]>v2a[i])
# Mixed numeric/alpha: a pre-release marker decides the ordering.
96 } else if (ispre(v1a[i]) == 1)
# Step 3: one version has extra trailing components; whether that
# makes it newer or older depends on whether the extras are
# pre-release markers (ispre).
101 if ((count2==mincount)&&(count!=count2)) {
102 for (i=count2+1; i<=count; i++)
103 if (ispre(v1a[i]) == 1)
106 } else if (count!=count2) {
107 for (i=count+1; i<=count2; i++)
108 if (ispre(v2a[i]) == 1)
115 function compare_ver_dec(v1,v2) {
116 # compares version numbers as decimal floats
# Variant of compare_ver() used first by process_source(): numeric
# components are compared as decimal *fractions* ("0.9" > "0.10"),
# which matters for projects that version that way.  Only the
# digit->letter boundary is dotted here (cf. compare_ver, which also
# splits letter->digit).  Several branch bodies are elided in this
# excerpt -- return values inferred from call sites; TODO confirm.
117 while (match(v1,/[0-9][a-zA-Z]/))
118 v1=(substr(v1,1,RSTART) "." substr(v1,RSTART+RLENGTH-1))
119 while (match(v2,/[0-9][a-zA-Z]/))
120 v2=(substr(v2,1,RSTART) "." substr(v2,RSTART+RLENGTH-1))
125 count=split(v1,v1a,"\.")
126 count2=split(v2,v2a,"\.")
128 if (count<count2) mincount=count
131 for (i=1; i<=mincount; i++) {
132 if (v1a[i]=="") v1a[i]=0
133 if (v2a[i]=="") v2a[i]=0
135 d("v1[i] == " v1a[i])
136 d("v2[i] == " v2a[i])
# Numeric components: prepend "." and force numeric context with 0+
# so "9" vs "10" compares as 0.9 vs 0.10 (i.e. 0.9 wins).
137 if ((v1a[i]~/[0-9]/)&&(v2a[i]~/[0-9]/)) {
139 if (0+("." v2a[i])>0+("." v1a[i]))
141 else if (0+("." v1a[i])>0+("." v2a[i]))
# Equal as decimals: fall back to length-then-string comparison,
# same scheme as compare_ver().
144 if (length(v2a[i])>length(v1a[i]))
146 else if (v2a[i]>v1a[i])
148 else if (length(v1a[i])>length(v2a[i]))
150 else if (v1a[i]>v2a[i])
# Both alphabetic: plain string comparison.
153 } else if ((v1a[i]~/[A-Za-z]/)&&(v2a[i]~/[A-Za-z]/)) {
156 else if (v1a[i]>v2a[i])
# Mixed types: pre-release marker decides.
158 } else if (ispre(v1a[i]) == 1)
# Trailing extra components: pre-release extras make that version
# older, other extras make it newer (mirrors compare_ver).
163 if ((count2==mincount)&&(count!=count2)) {
164 for (i=count2+1; i<=count; i++)
165 if (ispre(v1a[i]) == 1)
168 } else if (count!=count2) {
169 for (i=count+1; i<=count2; i++)
170 if (ispre(v2a[i]) == 1)
177 function link_seen(link) {
# Membership test against the global frameseen "set" (keys are links);
# used by get_links() to avoid revisiting links / infinite recursion.
# Linear scan over keys rather than `link in frameseen` -- left as-is.
178 for (seenlink in frameseen) {
179 if (seenlink == link) {
180 d("Link: [" link "] seen already, skipping...")
188 function mktemp( _cmd, _tmpfile) {
# Creates a temporary file via the external mktemp(1) and returns its
# path (the return statement is elided in this excerpt).  _cmd and
# _tmpfile are AWK-convention locals.
189 _cmd = "mktemp /tmp/XXXXXX"
190 _cmd | getline _tmpfile
# NOTE(review): no close(_cmd) visible here -- if it is also absent in
# the full source, command-pipe descriptors accumulate across calls.
195 # fix link to artificial one that will be recognized rest of this script
196 function postfix_link(url, link, oldlink) {
# Rewrites GitHub ".../tarball/<ref>" links into a synthetic
# "<ref>.tar.gz" name so the version-matching regex downstream can
# recognize them.  `oldlink` is a local holding the pre-rewrite value
# (its assignment is elided in this excerpt); returns the link.
198 if ((url ~/^(http|https):\/\/github.com\//) && (link ~ /.*\/tarball\//)) {
# Strip everything up to and including "/tarball/", keep the ref...
199 gsub(".*\/tarball\/", "", link)
# ...and fake a conventional tarball filename from it.
200 link = link ".tar.gz"
202 if (oldlink != link) {
203 d("POST FIXED URL [ " oldlink " ] to [ " link " ]")
208 # use perl HTML::TreeBuilder module to extract links from html
209 # it returns TAGNAME LINK in output which is pretty straightforward to parse in awk
210 function extract_links_cmd(tmpfile) {
# Builds (and returns) a shell command string: a Perl one-liner that
# parses the saved HTML in `tmpfile` with HTML::TreeBuilder and prints
# one "TAG LINK" pair per line for <a> and <iframe> elements, with
# duplicate links collapsed via the %links hash.  The command body is a
# single AWK string literal continued across lines, so no comments can
# be interleaved below.  (A few continuation lines are elided here.)
211 return "perl -MHTML::TreeBuilder -e ' \
212 my $content = join q//, <>; \
213 my $root = new HTML::TreeBuilder; \
214 $root->parse($content); \
217 for (@{$root->extract_links(qw(a iframe))}) { \
218 my($link, $element, $attr, $tag) = @$_; \
219 $links{$link} = $tag; \
222 while (my($link, $tag) = each %links) { \
223 print $tag, q/ /, $link, $/; \
228 # get all <A HREF=..> tags from specified URL
229 function get_links(url,filename, errno,link,oneline,retval,odp,wholeodp,lowerodp,tmpfile,cmd) {
# Downloads `url` with wget and returns a space-separated list of the
# links found on the page, or a "WGET ERROR: ..." string on failure.
# Everything after `filename` in the parameter list is an AWK-style
# local.  Recurses (via link_seen/frameseen guard) into frames and
# version-looking subdirectories.  Many lines are elided in this
# excerpt (e.g. tmpfile = mktemp(), the system(cmd) invocation, the
# USE_PERL branch test) -- comments below describe only what is shown.
234 tmpfileerr = mktemp()
# ---- URL mangling: rewrite known download/CDN URLs to the page on
# ---- that hosting service that actually lists available releases.
# SourceForge download mirrors -> project RSS feed.
236 if (url ~ /^http:\/\/(download|downloads|dl)\.(sf|sourceforge)\.net\//) {
238 # http://dl.sourceforge.net/threestore/
239 # http://downloads.sourceforge.net/project/mediainfo/source/mediainfo/
240 gsub("^http://(download|downloads|dl)\.(sf|sourceforge)\.net/", "", newurl)
241 gsub("^project/", "", newurl)
242 gsub("/.*", "", newurl)
243 url = "http://sourceforge.net/projects/" newurl "/rss?path=/"
244 d("sf url, mangled url to: " url)
# Google Code file links -> downloads list page.
246 } else if (url ~ /^http:\/\/(.*)\.googlecode\.com\/files\//) {
247 gsub("^http://", "", url)
248 gsub("\..*", "", url)
249 url = "http://code.google.com/p/" url "/downloads/list"
250 d("googlecode url, mangled url to: " url)
# PECL tarball -> package page (package name derived from filename).
252 } else if (url ~ /^http:\/\/pecl.php.net\/get\//) {
253 gsub("-.*", "", filename)
254 url = "http://pecl.php.net/package/" filename
255 d("pecl.php.net url, mangled url to: " url)
# MySQL CDN -> a browsable mirror.
257 } else if (url ~/http:\/\/cdn.mysql.com\//) {
258 gsub("http:\/\/cdn.mysql.com\/", "", url)
259 url = "http://vesta.informatik.rwth-aachen.de/mysql/" url
260 d("mysql CDN, mangled url to: " url)
# Launchpad (main and edge) -> project +download page.
262 } else if (url ~ /^(http|https):\/\/launchpad\.net\/(.*)\//) {
263 gsub("^(http|https):\/\/launchpad\.net\/", "", url)
264 gsub("\/.*/", "", url)
265 url = "https://code.launchpad.net/" url "/+download"
266 d("main launchpad url, mangled url to: " url)
268 } else if (url ~ /^(http|https):\/\/edge\.launchpad\.net\/(.*)\//) {
269 gsub("^(http|https):\/\/edge\.launchpad\.net\/", "", url)
270 gsub("\/.*/", "", url)
271 url = "https://edge.launchpad.net/" url "/+download"
272 d("edge launchpad url, mangled url to: " url)
# GitHub tarball/archive URLs -> downloads / tags listing.
274 } else if (url ~ /^(http|https):\/\/github.com\/.*\/(.*)\/tarball\//) {
275 gsub("\/tarball\/.*", "/downloads", url)
276 d("github tarball url, mangled url to: " url)
278 } else if (url ~ /^(http|https):\/\/github.com\/.*\/(.*)\/archive\//) {
279 gsub("\/archive\/.*", "/tags", url)
280 d("github archive url, mangled url to: " url)
282 } else if (url ~ /^(http|https):\/\/bitbucket.org\/.*\/get\/.*/) {
283 # https://bitbucket.org/logilab/pylint/get/tip.tar.bz2 -> https://bitbucket.org/logilab/pylint/downloads
284 gsub("\/get\/.*", "/downloads", url)
285 d("github bitbucket url, mangled url to: " url)
# cgit snapshot tarball -> repository summary page.
287 } else if (url ~ /^(http|https):\/\/cgit\..*\/(.*)\/snapshot\//) {
288 gsub("\/snapshot\/.*", "/", url)
289 d("cgit snapshot tarball url, mangled url to: " url)
# Assorted per-project fixed release pages.
291 } else if (url ~ /^(http|https):\/\/www2\.aquamaniac\.de\/sites\/download\//) {
292 url = "http://www2.aquamaniac.de/sites/download/packages.php"
293 d("aquamaniac.de tarball url, mangled url to: " url)
295 } else if (url ~ /^(http|https):\/\/www.process-one.net\/downloads\/ejabberd\//) {
296 url = "http://www.process-one.net/en/ejabberd/archive/"
297 d("ejabberd tarball url, mangled url to: " url)
299 } else if (url ~ /^(http|https):\/\/llvm.org\/releases\//) {
300 url = "http://llvm.org/releases/download.html"
301 d("llvm tarball url, mangled url to: " url)
303 } else if (url ~ /^(http|https):\/\/download\.owncloud\.org\/community\//) {
304 url = "http://owncloud.org/changelog/"
305 d("owncloud tarball url, mangled url to: " url)
307 } else if (url ~ /^(http|https):\/\/hackage\.haskell\.org\/packages\/archive\//) {
308 gsub("\/packages\/archive","/package",url)
309 d("hackage haskell tarball url, mangled url to: " url)
311 } else if (url ~ /^http:\/\/www.taskwarrior.org\/download\//) {
312 url = "http://taskwarrior.org/projects/taskwarrior/wiki/Download"
313 d("taskwarrior tarball url, mangled url to: " url)
314 } else if (url ~/^http:\/\/www.rarlab.com\/rar\// && filename ~ /^unrarsrc/) {
315 url = "http://www.rarlab.com/rar_add.htm"
316 d("unrar tarball url, mangled url to: " url)
317 } else if (url ~/^http:\/\/www.rarlab.com\/rar\//) {
318 url = "http://www.rarlab.com/download.htm"
319 d("rar tarball url, mangled url to: " url)
# PyPI source tarball path -> package index page.
320 } else if (url ~/^(http|https):\/\/pypi.python.org\/packages\/source\/.*/) {
321 gsub("/packages/source/[a-zA-Z0-9]/", "/pypi/", url)
322 d("pypi.python.org url, mangled url to: " url)
# ---- Fetch: wget writes the body to tmpfile and stderr to
# ---- tmpfileerr; the actual system() call is elided in this excerpt.
325 d("Retrieving: " url)
326 user_agent = "Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100129 PLD/3.0 (Th) Iceweasel/3.6"
327 cmd = "wget -t 2 -T 45 --user-agent \"" user_agent "\" -nv -O - \"" url "\" --passive-ftp --no-check-certificate > " tmpfile " 2> " tmpfileerr
# Failure path: collect wget's stderr into the returned error string.
333 d("Reading failure response...")
335 while (getline oneline < tmpfileerr)
336 wholeerr = (wholeerr " " oneline)
337 d("Error Response: " wholeerr)
339 system("rm -f " tmpfile)
340 system("rm -f " tmpfileerr)
341 retval = ("WGET ERROR: " errno ": " wholeerr)
344 system("rm -f " tmpfileerr)
# Base directory of the fetched URL, used to absolutize relative links.
347 sub(/[^\/]+$/, "", urldir)
# ---- Perl-based extraction path: run the HTML::TreeBuilder one-liner
# ---- and read "TAG LINK" pairs from its output.
350 cmd = extract_links_cmd(tmpfile)
351 while (cmd | getline) {
# $0 is "TAG LINK"; drop the tag name and the separating space.
353 link = substr($0, length(tag) + 2)
355 if (tag == "iframe") {
# iframes are pages themselves: recurse into them (once).
362 if (link_seen(link)) {
365 retval = (retval " " get_links(link))
368 if (link_seen(link)) {
372 retval = (retval " " link)
# ---- AWK-based fallback extraction path: slurp the whole body into
# ---- one string and regex-scan it for tags.
380 d("Reading success response...")
381 while (getline oneline < tmpfile) {
382 wholeodp = (wholeodp " " oneline)
383 # d("Response: " wholeodp)
# NOTE(review): "Reponse" typo below is inside a runtime debug string;
# left untouched deliberately.
385 d("Reponse read done...")
386 system("rm -f " tmpfile)
388 # MATCH one of these:
389 #while (match(wholeodp, /<([aA]|[fF][rR][aA][mM][eE])[ \t][^>]*>/) > 0) {
390 #while (match(wholeodp, /<link>[^<]*<\/link>/) > 0) {
# Combined pattern: RSS <link>...</link> entries OR <a>/<frame> tags.
392 while (match(wholeodp, /(<link>[^<]*<\/link>|<([aA]|[fF][rR][aA][mM][eE])[ \t][^>]*>)/) > 0) {
393 d("Processing links...")
# Cut the matched tag out of the buffer and advance past it.
394 odp = substr(wholeodp,RSTART,RLENGTH);
395 wholeodp = substr(wholeodp,RSTART+RLENGTH);
397 lowerodp = tolower(odp);
# Case 1: <frame src=...> -- absolutize relative src and recurse.
398 if (lowerodp ~ /<frame[ \t]/) {
399 sub(/[sS][rR][cC]=[ \t]*/, "src=", odp);
400 match(odp, /src="[^"]+"/)
401 newurl = substr(odp, RSTART+5, RLENGTH-6)
403 if (newurl !~ /\//) {
404 newurl=(urldir newurl)
405 d("Frame->: " newurl)
408 if (link_seen(newurl)) {
413 retval = (retval " " get_links(newurl))
414 d("href('condition1': " newurl)
# Case 2: double-quoted href -- extract, normalize via postfix_link.
415 } else if (lowerodp ~ /href=[ \t]*"[^"]*"/) {
416 sub(/[hH][rR][eE][fF]=[ \t]*"/,"href=\"",odp)
417 match(odp,/href="[^"]*"/)
418 link=substr(odp,RSTART,RLENGTH)
419 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
420 link=substr(link,7,length(link)-7)
421 link=postfix_link(url, link)
423 if (link_seen(link)) {
428 # link ends with at least 2 digit version
# Looks like a versioned subdirectory -- recurse to list it too.
430 if (link ~ /^.*\/[v]*[0-9\.]+[0-9]\/$/)
431 mlink = get_links(link)
433 retval = (retval " " link " " mlink)
434 d("href('condition2'): " link)
# Case 3: single-quoted href.
435 } else if (lowerodp ~ /href=[ \t]*'[^']*'/) {
436 sub(/[hH][rR][eE][fF]=[ \t]*'/,"href='",odp)
437 match(odp,/href='[^']*'/)
438 link=substr(odp,RSTART,RLENGTH)
439 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
440 link=substr(link,7,length(link)-7)
441 link=postfix_link(url, link)
443 if (link_seen(link)) {
448 retval = (retval " " link)
449 d("href('condition3'): " link)
# Case 4: unquoted href value (up to whitespace or '>').
450 } else if (lowerodp ~ /href=[ \t]*[^ \t>]*/) {
451 sub(/[hH][rR][eE][fF]=[ \t]*/,"href=",odp)
452 match(odp,/href=[^ \t>]*/)
453 link=substr(odp,RSTART,RLENGTH)
454 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
455 link=substr(link,6,length(link)-5)
457 if (link_seen(link)) {
462 retval = (retval " " link)
463 d("href('condition4'): " link)
# Case 5: RSS <link> element (sourceforge feed rewrite above).
464 } else if (lowerodp ~ /<link>/) {
466 sub("/<link>/", link)
467 sub("/\/download<\/link>/", link)
469 if (link_seen(link)) {
474 retval = (retval " " link)
475 d("href('condition5'): " link)
477 # <a ...> but not href - skip
478 d("skipping <a > without href: " odp)
482 d("Returning: [" retval "]")
486 function subst_defines(var,defs) {
487 # substitute all possible RPM macros
# Repeatedly expands %{name} and %name occurrences in `var` using the
# defs[] map until nothing macro-shaped remains, then returns var.
# The `for (j in defs)` loop binding `j` and the return are elided in
# this excerpt -- confirm against the full source.
488 while ((var ~ /%{.*}/) || (var ~ /%[A-Za-z0-9_]+/)) {
491 gsub("%{" j "}", defs[j], var)
492 gsub("%" j , defs[j], var)
493 # conditional macros like %{?patchlevel:.5} - drop these for now
494 gsub("%{\?" j ":.*?}", "", var)
508 function find_mirror(url) {
# Consults the optional "./mirrors" file ("|"-separated fields per
# line: apparently origin|mirror|name...) and, when `url` starts with a
# configured origin prefix, returns the URL rewritten onto the mirror.
# A getline read error (-1) falls back to returning url unchanged; the
# prefix==origin test and the field unpacking are elided in this
# excerpt -- confirm field order against the full source.
510 while (succ = (getline line < "mirrors")) {
511 if (succ==-1) { return url }
512 nf=split(line,fields,"|")
517 prefix=substr(url,1,length(origin))
519 d("Mirror found at " mname)
# Splice the mirror base onto the origin-relative remainder of url.
521 return mirror substr(url,length(origin)+1)
529 # fetches file list, and compares version numbers
# For SourceN `lurl` of package `name` at `version`: derives a filename
# regex with the version replaced by a wildcard, fetches the listing
# via get_links(), and prints whether a newer upstream file exists.
# Many lines (acc/host/dir/filename parsing, the comparison bookkeeping
# around oldversion) are elided in this excerpt.
530 function process_source(number, lurl, name, version) {
531 d("Processing " lurl)
# If the URL does not even contain the version string, there is
# nothing to wildcard -- bail out (body elided).
533 if (index(lurl, version) == 0) {
534 d("There is no version number ["version"] in ["lurl"]")
# Mark the filename part of the URL with ":" for later splitting.
540 gsub("[^/]*$",":&",lurl)
547 if (index(dir,version)) {
548 # directory name as version matching mode:
549 # if /something/version/name-version.tarball then check
550 # in /something/ looking for newer directory
551 dir=substr(dir,1,index(dir,version)-1)
# Drop the archive suffix so directory names match too.
553 sub("(\.tar\.(bz|bz2|gz|lzma|xz)|zip)$","",filename)
556 d("Will check a directory: " dir)
557 d("and a file: " filename)
# Build filenameexp: escape regex metacharacters (+, .) and replace
# the version and the archive suffix with wildcard alternations.
560 gsub("[+]","\\+",filenameexp)
561 sub(version,"[A-Za-z0-9.]+",filenameexp)
562 gsub("[.]","\\.",filenameexp)
563 sub("\.(bz|bz2|gz|lzma|xz|zip)$",".(bz|bz2|gz|lzma|xz|zip)",filenameexp)
564 d("Expression: " filenameexp)
# Remember the literal text before/after the version so it can be
# stripped from candidate filenames later (via fixedsub).
565 match(filename,version)
566 prever=substr(filename,1,RSTART-1)
567 postver=substr(filename,RSTART+RLENGTH)
568 d("Before number: " prever)
569 d("and after: " postver)
# Prefer a configured local mirror for the listing URL.
570 newurl=find_mirror(acc "://" host dir)
571 #print acc "://" host dir
572 #newurl=url[1]"://"url[2]url[3]url[4]
573 #newurl=acc "://" host dir filename
574 d("Looking at " newurl)
579 odp = get_links(newurl, filename)
# get_links() signals failure in-band with an "ERROR: " string.
580 if( odp ~ "ERROR: ") {
581 print name "(" number ") " odp
583 d("WebPage downloaded")
# Scan every link returned (split into addr -- elided) for matches.
585 for (nr=1; nr<=c; nr++) {
588 d("Found link: " addr)
590 # Try not to treat foobar or foo-bar as (possibly newer) version of bar
591 # (practical cases: KXL, lineakconfig, mhash...)
592 # but don't skip cases where name is like "/some/link/0.12.2.tar.gz"
593 if ((addr ~ "[-_.0-9A-Za-z~]" filenameexp) && addr !~ "[-_.0-9A-Za-z~]/" filenameexp) {
597 if (addr ~ filenameexp) {
598 match(addr,filenameexp)
599 newfilename=substr(addr,RSTART,RLENGTH)
600 d("Hypothetical new: " newfilename)
# Strip the fixed text around the version to isolate the candidate
# version string itself.
601 newfilename=fixedsub(prever,"",newfilename)
602 newfilename=fixedsub(postver,"",newfilename)
603 d("Version: " newfilename)
# Ignore signatures and binary artifacts.
604 if (newfilename ~ /\.(asc|sig|pkg|bin|binary|built)$/) continue
605 # strip ending (happens when in directory name as version matching mode)
606 sub("(\.tar\.(bz|bz2|gz|lzma|xz)|zip)$","",newfilename)
# Two comparison passes: decimal-fraction semantics first, then the
# plain component comparison.
608 if ( compare_ver_dec(version, newfilename)==1 ) {
609 d("Yes, there is new one")
613 } else if ( compare_ver(version, newfilename)==1 ) {
614 d("Yes, there is new one")
621 print name "(" number ") seems ok: " oldversion
623 print name "(" number ") [OLD] " oldversion " [NEW] " version
# Generic RSS-based version check: fetch `url`, sed out the first
# <title> matching `regex` (whose capture group 1 is the version) and
# return it (execution/return lines elided in this excerpt).
# `cmd` is an AWK-convention local.
627 function rss_upgrade(name, ver, url, regex, cmd) {
# Wrap the caller's pattern into a sed print-on-match substitution.
628 regex = "s/.*<title>" regex "<\/title>.*/\\1/p"
629 cmd = "wget -t 2 -T 45 -q -O - " url " | sed -nre '" regex "' | head -n1"
631 d("rss_upgrade_cmd: " cmd)
638 # check for ZF upgrade from rss
# Zend Framework: delegate to rss_upgrade() with the devzone feed and a
# title pattern capturing the released version.  (The call is a single
# backslash-continued statement; no comments may be interleaved.)
639 function zf_upgrade(name, ver) {
640 return rss_upgrade(name, ver, \
641 "http://devzone.zend.com/tag/Zend_Framework_Management/format/rss2.0", \
642 "Zend Framework ([^\\s]+) Released" \
# Hudson CI: delegate to rss_upgrade() with the project news feed.
# (Single backslash-continued statement; no comments interleaved.)
646 function hudson_upgrade(name, ver) {
647 return rss_upgrade(name, ver, \
648 "https://hudson.dev.java.net/servlets/ProjectRSS?type=news", \
649 "Hudson ([0-9.]+) released" \
653 # upgrade check for pear package using PEAR CLI
# Strips the distro "php-pear-" prefix to get the upstream package
# name, then asks `pear remote-info` for the latest version (command
# execution and return are elided in this excerpt).
654 function pear_upgrade(name, ver, cmd) {
655 sub(/^php-pear-/, "", name);
657 cmd = "pear remote-info " name " | awk '/^Latest/{print $NF}'"
# vim patchlevel check: reads the MD5SUMS index for the DEFS["ver"]
# patch series on ftp.vim.org and takes the last (non-.gz) entry's
# filename as the newest patch (execution/return elided here).
665 function vim_upgrade(name, ver, cmd) {
666 # %patchset_source -f ftp://ftp.vim.org/pub/editors/vim/patches/7.2/7.2.%03g 1 %{patchlevel}
667 cmd = "wget -q -O - ftp://ftp.vim.org/pub/editors/vim/patches/" DEFS["ver"] "/MD5SUMS|grep -vF .gz|tail -n1|awk '{print $2}'"
# nodejs package check via npm: queries dist-tags.latest, preferring an
# explicit %define pkg name over the spec name (the branch condition
# between the two cmd assignments is elided in this excerpt).
674 function nodejs_upgrade(name, ver, cmd) {
675 d("NODEJS " name " (as " DEFS["pkg"] ") " ver);
677 cmd = "npm info " DEFS["pkg"] " dist-tags.latest"
679 cmd = "npm info " name " dist-tags.latest"
# rubygem check via `gem list --remote`: resolves the upstream gem name
# from %define gem_name / gemname / pkgname (first match wins), falling
# back to the spec name with its "ruby-" prefix stripped; the embedded
# awk filter peels the parenthesized version out of gem's output.
# Execution/return lines are elided in this excerpt.
687 function rubygem_upgrade(name, ver, cmd, pkg) {
688 if (DEFS["gem_name"]) {
689 pkg = DEFS["gem_name"];
691 } else if (DEFS["gemname"]) {
692 pkg = DEFS["gemname"];
694 } else if (DEFS["pkgname"]) {
695 pkg = DEFS["pkgname"];
699 gsub(/^ruby-/, "", pkg);
702 cmd = "gem list --remote '^" pkg "$' | awk '/" pkg "/ {v=$2; sub(/\(/, \"\", v); sub(/\)$/, \"\", v); print v}'"
703 d("RUBYGEM " name " (as " pkg ") " ver ": " cmd);
# Google Linux repo check: greps the (gzipped) yum primary.xml metadata
# for the <name>/<version> of the `name`-DEFS["state"] package.  Used
# for google-chrome / google-talkplugin.  Execution/return elided here.
# NOTE(review): "\d" inside the perl -ne pattern is passed through the
# AWK string as-is -- intended for Perl, not AWK; confirm it survives
# AWK string escaping in the full source.
711 function google_linux_repo(name, ver, reponame, cmd, sourceurl) {
712 sourceurl = "http://dl.google.com/linux/" reponame "/rpm/stable/x86_64/repodata/primary.xml.gz"
713 cmd = "curl -s " sourceurl " | zcat | perl -ne 'm{<name>" name "-" DEFS["state"] "</name>} and m{<version .*ver=.([\d.]+)} and print $1'"
714 d("google repo: " cmd);
# jenkins check: strips the version-specific tail off the Source URL to
# get the mirror directory listing, fetches it with get_links(), and
# (per the comment below) picks the second-to-last path chunk as the
# new version.  The url/urls unpacking and the return are elided here.
721 function jenkins_upgrade(name, ver, urls, url, i, c, chunks, nver) {
724 # http://mirrors.jenkins-ci.org/war-stable/1.509.1/jenkins.war?/jenkins-1.509.1.war
725 gsub("/" ver "/jenkins.war\?/jenkins-" ver ".war", "/", url);
726 c = split(get_links(url), chunks, "/")
727 # new version is second one from the bottom
734 # check for update from release-monitoring.org
# Fallback checker: queries the release-monitoring.org pld-linux
# project API and extracts .version from the JSON by evaluating it
# with node (execution/return elided in this excerpt).
# NOTE(review): piping unparsed remote JSON into `node` executes it as
# code -- trusted source assumed; worth flagging in the full file.
735 function rmo_check(name, sourceurl, cmd) {
736 sourceurl = "https://release-monitoring.org/api/project/pld-linux/" name
737 cmd = "echo 'var data='$(curl -s " sourceurl ")';process.stdout.write(data.version)' | node"
# Package-type dispatcher: routes each parsed spec (name/ver/rel/src)
# to the matching dedicated checker, falling back to release-monitoring
# (and, per call sites, process_source for plain URLs -- that path is
# elided in this excerpt), then prints OLD/NEW or "seems ok".
745 function process_data(name, ver, rel, src, nver, i) {
746 if (name ~ /^php-pear-/) {
747 nver = pear_upgrade(name, ver);
748 } else if (name == "ZendFramework") {
749 nver = zf_upgrade(name, ver);
750 } else if (name == "hudson") {
751 nver = hudson_upgrade(name, ver);
752 } else if (name == "vim") {
753 nver = vim_upgrade(name, ver);
754 } else if (name == "google-chrome") {
755 nver = google_linux_repo(name, ver, "chrome");
756 } else if (name == "google-talkplugin") {
757 nver = google_linux_repo(name, ver, "talkplugin");
758 } else if (name ~ "^nodejs-") {
759 nver = nodejs_upgrade(name, ver);
760 } else if (name ~ "^ruby-" || name == "chef") {
761 nver = rubygem_upgrade(name, ver);
762 } else if (name ~ "jenkins") {
763 nver = jenkins_upgrade(name, ver, src);
# Default: ask release-monitoring.org.
765 nver = rmo_check(name);
# compare_ver(ver, nver) truthy => upstream nver is newer.
769 if (compare_ver(ver, nver)) {
770 print name " [OLD] " ver " [NEW] " nver
772 print name " seems ok: " ver
# Fragment of a later function whose header is elided in this excerpt
# (per its own comment: validates macro substitution of each SourceN
# URL, then hands valid ones to process_source).
# xulrunner tracks firefox: borrow firefox's version for comparison.
777 if (name == "xulrunner") {
778 ver = subst_defines(DEFS["firefox_ver"], DEFS)
779 d("package xulrunner, change version to firefox ["ver"]")
782 # this function checks if substitutions were valid, and if true:
783 # processes each URL and tries to get current file list
# %{nil} expands to nothing -- remove it before the leftover-macro test.
785 if (src[i] ~ /%{nil}/) {
786 gsub(/\%\{nil\}/, "", src[i])
# Only URLs with no remaining %-macros can be checked.
788 if ( src[i] !~ /%{.*}/ && src[i] !~ /%[A-Za-z0-9_]/ ) {
790 process_source(i, src[i], name, ver)
792 print FNAME ":" i ": impossible substitution: " src[i]
798 # use perl links extraction by default
801 # if you want to use DEBUG, run script with "-v DEBUG=1"
802 # or uncomment the line below
# --- BEGIN-block fragment (block delimiters elided in this excerpt) ---
# wget exit code 3 is tolerated here; anything else nonzero aborts.
805 errno=system("wget --help > /dev/null 2>&1")
806 if (errno && errno != 3) {
807 print "No wget installed!"
# Optional "-n" flag after the program name: shift remaining ARGV down.
810 if (ARGC>=3 && ARGV[2]=="-n") {
812 for (i=3; i<ARGC; i++) ARGV[i-1]=ARGV[i]
819 # clean frameseen for each ARG
820 for (i in frameseen) {
# Per-specfile reset: flush the previous spec's parsed state after
# processing it, so values never leak between input files.
825 process_data(NAME,VER,REL,SRC)
826 NAME="" ; VER="" ; REL=""
827 for (i in DEFS) delete DEFS[i]
828 for (i in SRC) delete SRC[i]
831 DEFS["_alt_kernel"]=""
# --- Per-line spec parsing rules: first occurrence of each header
# --- wins (the XXX=="" guards); values get macro expansion on read.
836 /^[Uu][Rr][Ll]:/&&(URL=="") { URL=subst_defines($2,DEFS) ; DEFS["url"]=URL }
837 /^[Nn]ame:/&&(NAME=="") { NAME=subst_defines($2,DEFS) ; DEFS["name"]=NAME }
838 /^[Vv]ersion:/&&(VER=="") { VER=subst_defines($2,DEFS) ; DEFS["version"]=VER }
839 /^[Rr]elease:/&&(REL=="") { REL=subst_defines($2,DEFS) ; DEFS["release"]=REL }
# Only remote SourceN URLs are recorded, keyed by their line number.
840 /^[Ss]ource[0-9]*:/ { if (/(ftp|http|https):\/\//) SRC[FNR]=subst_defines($2,DEFS) }
841 /%define/ { DEFS[$2]=subst_defines($3,DEFS) }
# END-block fragment: process the final (or only) specfile's data.
844 process_data(NAME,VER,REL,SRC)