# NOTE: this file was recovered from a gitweb blame view of
# packages/rpm-build-tools.git / pldnotify.awk
1#!/bin/awk -f
2#
3# Copyright (C) 2000-2013 PLD-Team <feedback@pld-linux.org>
4# Authors:
5# Sebastian Zagrodzki <zagrodzki@pld-linux.org>
6# Jacek Konieczny <jajcus@pld-linux.org>
7# Andrzej Krzysztofowicz <ankry@pld-linux.org>
8# Jakub Bogusz <qboosh@pld-linux.org>
9# Elan Ruusamäe <glen@pld-linux.org>
10#
11# See cvs log pldnotify.awk for list of contributors
12#
13# TODO:
14# - "SourceXDownload" support (use given URLs if present instead of cut-down SourceX URLs)
15# - "SourceXActiveFTP" support
16# - support debian/watch http://wiki.debian.org/debian/watch/
17
18# NOTE:
19# to test run this, run:
20# $ awk -vDEBUG=1 pldnotify.awk < specfile
21#
22# To get full out of it, you need to have following tools installed:
23# - perl, sed, wget, coreutils, util-linux
24# - perl-HTML-Tree (HTML::TreeBuilder module) for better links parser (-vUSE_PERL=0 to disable)
25# - php-pear-PEAR for php-pear package updates
26# - npm for nodejs packages
27#
28# Additionally "mirrors" file in current dir, controls local mirrors you prefer
29
# Debug helper: print a trace line to stderr, but only when the script
# was started with -v DEBUG=1; a silent no-op otherwise.
function d(s) {
	if (DEBUG) {
		print s >> "/dev/stderr"
	}
}
38
# Replace the first occurrence of the literal (non-regexp) string s1
# inside t with s2.  Returns t unchanged when s1 does not occur.
function fixedsub(s1,s2,t, ind) {
	ind = index(t, s1)
	if (ind == 0)
		return t
	return substr(t, 1, ind - 1) s2 substr(t, ind + length(s1))
}
46
# Does the version component look like a pre-release marker?
# Matches pre/beta/alpha/rc in either all-lower or all-upper case,
# anywhere in the string; returns 1 for pre-release, 0 otherwise.
function ispre(s) {
	if (s ~ /pre|PRE|beta|BETA|alpha|ALPHA|rc|RC/) {
		d("pre-version")
		return 1
	}
	return 0
}
55
# Compare two version strings component by component.
# Returns 1 when v2 is newer than v1, 0 otherwise.
#
# BUGFIX: the original wrote dynamic regexps as "\.0*" / split(..., "\.").
# In an awk string literal "\." is an undefined escape that gawk
# collapses to a plain "." (match-anything), so leading-zero stripping
# and component splitting operated on every character.  The dots are
# now properly escaped as "\\." to match a literal dot.
# Scratch variables are declared as extra parameters so they stay
# function-local instead of leaking into the global namespace.
function compare_ver(v1,v2, v1a,v2a,count,count2,mincount,i) {
	# insert a dot at every letter<->digit boundary: "1a" -> "1.a"
	while (match(v1, /[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
		v1 = (substr(v1, 1, RSTART) "." substr(v1, RSTART+RLENGTH-1))
	while (match(v2, /[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
		v2 = (substr(v2, 1, RSTART) "." substr(v2, RSTART+RLENGTH-1))
	# strip leading zeros from every component
	sub("^0*", "", v1)
	sub("^0*", "", v2)
	gsub("\\.0*", ".", v1)
	gsub("\\.0*", ".", v2)
	d("v1 == " v1)
	d("v2 == " v2)
	count = split(v1, v1a, "\\.")
	count2 = split(v2, v2a, "\\.")

	if (count < count2) mincount = count
	else mincount = count2

	for (i = 1; i <= mincount; i++) {
		if (v1a[i] == "") v1a[i] = 0
		if (v2a[i] == "") v2a[i] = 0
		d("i == " i)
		d("v1[i] == " v1a[i])
		d("v2[i] == " v2a[i])
		if ((v1a[i] ~ /[0-9]/) && (v2a[i] ~ /[0-9]/)) {
			# both numeric: a longer digit string is numerically greater
			# (components are compared as strings, so check length first)
			if (length(v2a[i]) > length(v1a[i]))
				return 1
			else if (v2a[i] > v1a[i])
				return 1
			else if (length(v1a[i]) > length(v2a[i]))
				return 0
			else if (v1a[i] > v2a[i])
				return 0
		} else if ((v1a[i] ~ /[A-Za-z]/) && (v2a[i] ~ /[A-Za-z]/)) {
			# both alphabetic: plain string comparison
			if (v2a[i] > v1a[i])
				return 1
			else if (v1a[i] > v2a[i])
				return 0
		} else if (ispre(v1a[i]) == 1)
			return 1
		else
			return 0
	}
	# common prefix equal: extra components win unless they are
	# pre-release markers (e.g. "1.0" is newer than "1.0rc1")
	if ((count2 == mincount) && (count != count2)) {
		for (i = count2 + 1; i <= count; i++)
			if (ispre(v1a[i]) == 1)
				return 1
		return 0
	} else if (count != count2) {
		for (i = count + 1; i <= count2; i++)
			if (ispre(v2a[i]) == 1)
				return 0
		return 1
	}
	return 0
}
112
# Compare two version strings treating the second component as a
# decimal fraction (so "1.9" is newer than "1.10"); all other
# components are compared like compare_ver().  Used in -n (NUMERIC)
# mode.  Returns 1 when v2 is newer than v1, 0 otherwise.
#
# BUGFIX: the split separator was written as "\." -- an undefined
# escape in awk string literals that gawk degrades to the
# match-anything regexp "." -- now escaped as "\\." (literal dot).
# Scratch variables are declared as extra parameters to keep them
# function-local.
function compare_ver_dec(v1,v2, v1a,v2a,count,count2,mincount,i) {
	# insert a dot at digit->letter boundaries: "1a" -> "1.a"
	while (match(v1, /[0-9][a-zA-Z]/))
		v1 = (substr(v1, 1, RSTART) "." substr(v1, RSTART+RLENGTH-1))
	while (match(v2, /[0-9][a-zA-Z]/))
		v2 = (substr(v2, 1, RSTART) "." substr(v2, RSTART+RLENGTH-1))
	sub("^0*", "", v1)
	sub("^0*", "", v2)
	d("v1 == " v1)
	d("v2 == " v2)
	count = split(v1, v1a, "\\.")
	count2 = split(v2, v2a, "\\.")

	if (count < count2) mincount = count
	else mincount = count2

	for (i = 1; i <= mincount; i++) {
		if (v1a[i] == "") v1a[i] = 0
		if (v2a[i] == "") v2a[i] = 0
		d("i == " i)
		d("v1[i] == " v1a[i])
		d("v2[i] == " v2a[i])
		if ((v1a[i] ~ /[0-9]/) && (v2a[i] ~ /[0-9]/)) {
			if (i == 2) {
				# second component: compare as decimal fractions
				if (0+("." v2a[i]) > 0+("." v1a[i]))
					return 1
				else if (0+("." v1a[i]) > 0+("." v2a[i]))
					return 0
			} else {
				# longer digit string is numerically greater
				if (length(v2a[i]) > length(v1a[i]))
					return 1
				else if (v2a[i] > v1a[i])
					return 1
				else if (length(v1a[i]) > length(v2a[i]))
					return 0
				else if (v1a[i] > v2a[i])
					return 0
			}
		} else if ((v1a[i] ~ /[A-Za-z]/) && (v2a[i] ~ /[A-Za-z]/)) {
			if (v2a[i] > v1a[i])
				return 1
			else if (v1a[i] > v2a[i])
				return 0
		} else if (ispre(v1a[i]) == 1)
			return 1
		else
			return 0
	}
	# common prefix equal: extra components win unless pre-release
	if ((count2 == mincount) && (count != count2)) {
		for (i = count2 + 1; i <= count; i++)
			if (ispre(v1a[i]) == 1)
				return 1
		return 0
	} else if (count != count2) {
		for (i = count + 1; i <= count2; i++)
			if (ispre(v2a[i]) == 1)
				return 0
		return 1
	}
	return 0
}
174
# Return 1 when `link` was already collected during this run (and log
# the skip), otherwise remember it in the global frameseen[] set and
# return 0.
# Improvement: use awk's O(1) `in` membership operator instead of
# scanning every key of frameseen on each call.
function link_seen(link) {
	if (link in frameseen) {
		d("Link: [" link "] seen already, skipping...")
		return 1
	}
	frameseen[link] = 1
	return 0
}
185
# Create a unique temporary file via mktemp(1) and return its path.
function mktemp( cmd_, path_) {
	cmd_ = "mktemp /tmp/XXXXXX"
	cmd_ | getline path_
	close(cmd_)
	return path_
}
192
# fix link to artificial one that will be recognized rest of this script
# Currently: github "/tarball/<ref>" links are rewritten to
# "<ref>.tar.gz" so the version-extraction code can handle them.
# Returns the (possibly rewritten) link.
function postfix_link(url, link, oldlink) {
	oldlink = link
	if ((url ~/^(http|https):\/\/github.com\//) && (link ~ /.*\/tarball\//)) {
		# BUGFIX: the pattern was written ".*\/tarball\/" -- "\/" is an
		# undefined escape in awk string literals; a plain "/" needs no
		# escaping inside a dynamic regexp string
		gsub(".*/tarball/", "", link)
		link = link ".tar.gz"
	}
	if (oldlink != link) {
		d("POST FIXED URL [ " oldlink " ] to [ " link " ]")
	}
	return link
}
205
# use perl HTML::TreeBuilder module to extract links from html
# it returns TAGNAME LINK in output which is pretty stright forward to parse in awk
function extract_links_cmd(tmpfile) {
	# Builds (but does not run) a shell command line: a small inline
	# perl program that parses the saved HTML in `tmpfile` and prints
	# one "TAG URL" pair per unique link found in <a> and <iframe>
	# elements.  The whole perl source is a single awk string; the
	# trailing ` " tmpfile` concatenates the file name as the script's
	# argument.  Do not reformat the quoted perl -- every byte of the
	# string is part of the executed command.
	return "perl -MHTML::TreeBuilder -e ' \
	my $content = join q//, <>; \
	my $root = new HTML::TreeBuilder; \
	$root->parse($content); \
	\
	my %links = (); \
	for (@{$root->extract_links(qw(a iframe))}) { \
		my($link, $element, $attr, $tag) = @$_; \
		$links{$link} = $tag; \
	} \
	\
	while (my($link, $tag) = each %links) { \
		print $tag, q/ /, $link, $/; \
	} \
	' " tmpfile
}
225
# get all <A HREF=..> tags from specified URL
#
# Downloads `url` with wget into a temp file, then collects every link
# on the page and returns them all in one space-separated string.
# Known download-service URLs are first rewritten to the page that
# actually lists files (sourceforge, googlecode, pecl, launchpad,
# github, cgit, ...).  When USE_PERL is set the parsing is delegated
# to perl HTML::TreeBuilder (see extract_links_cmd); otherwise a
# hand-rolled <a>/<frame> scanner is used.  Frames are followed
# recursively.  On wget failure the returned string starts with
# "WGET ERROR:" so callers can detect it.
# NOTE(review): several dynamic-regexp strings below use "\." and
# "\/", which are undefined escapes in awk string literals (gawk
# collapses them to plain "." / "/" with a warning) -- left byte-for-
# byte untouched here; see the escaped forms used elsewhere in this
# file for the strict spelling.
function get_links(url,filename, errno,link,oneline,retval,odp,wholeodp,lowerodp,tmpfile,cmd) {

	wholeerr=""

	tmpfile = mktemp()
	tmpfileerr = mktemp()

	# sourceforge direct-download links: scan the project file list instead
	if (url ~ /^http:\/\/(download|dl)\.(sf|sourceforge)\.net\//) {
		# http://downloads.sourceforge.net/project/mediainfo/source/mediainfo/
		gsub("^http://(download|dl)\.(sf|sourceforge)\.net/", "", url)

		gsub("/.*", "", url)
		url = "http://sourceforge.net/projects/" url "/files/"
		d("sf url, mungled url to: " url)
	}

	# google code file links: use the downloads list page
	if (url ~ /^http:\/\/(.*)\.googlecode\.com\/files\//) {
		gsub("^http://", "", url)
		gsub("\..*", "", url)
		url = "http://code.google.com/p/" url "/downloads/list"
		d("googlecode url, mungled url to: " url)
	}

	# pecl.php.net get links: use the package page
	if (url ~ /^http:\/\/pecl.php.net\/get\//) {
		gsub("-.*", "", filename)
		url = "http://pecl.php.net/package/" filename
		d("pecl.php.net url, mungled url to: " url)
	}

	# mysql 5.1 mirrors: use the fixed source-download page
	if (url ~ /^(http|ftp):\/\/mysql.*\/Downloads\/MySQL-5.1\//) {
		url = "http://dev.mysql.com/downloads/mysql/5.1.html#source"
		d("mysql 5.1 url, mungled url to: " url)
	}

	# launchpad project pages: use the +download listing
	if (url ~/^(http|https):\/\/launchpad\.net\/(.*)\//) {
		gsub("^(http|https):\/\/launchpad\.net\/", "", url)
		gsub("\/.*/", "", url)
		url = "https://code.launchpad.net/" url "/+download"
		d("main launchpad url, mungled url to: " url)
	}

	if (url ~/^(http|https):\/\/edge\.launchpad\.net\/(.*)\//) {
		gsub("^(http|https):\/\/edge\.launchpad\.net\/", "", url)
		gsub("\/.*/", "", url)
		url = "https://edge.launchpad.net/" url "/+download"
		d("edge launchpad url, mungled url to: " url)
	}

	# github tarball links: use the project downloads page
	if (url ~/^(http|https):\/\/github.com\/.*\/(.*)\/tarball\//) {
		gsub("\/tarball\/.*", "/downloads", url)
		d("github tarball url, mungled url to: " url)
	}

	# cgit snapshot links: use the repository summary page
	if (url ~/^(http|https):\/\/cgit\..*\/(.*)\/snapshot\//) {
		gsub("\/snapshot\/.*", "/", url)
		d("cgit snapshot tarball url, mungled url to: " url)
	}

	if (url ~/^(http|https):\/\/www2\.aquamaniac\.de\/sites\/download\//) {
		url = "http://www2.aquamaniac.de/sites/download/packages.php"
		d("aquamaniac.de tarball url, mungled url to: " url)
	}

	# fetch the page; stdout -> tmpfile, stderr -> tmpfileerr
	d("Retrieving: " url)
	user_agent = "Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100129 PLD/3.0 (Th) Iceweasel/3.6"
	cmd = "wget -t 2 -T 45 --user-agent \"" user_agent "\" -nv -O - \"" url "\" --passive-ftp --no-check-certificate > " tmpfile " 2> " tmpfileerr
	d("Execute: " cmd)
	errno = system(cmd)
	d("Execute done")

	if (errno != 0) {
		# download failed: return wget's stderr as an error marker string
		d("Reading failure response...")
		wholeerr = ""
		while (getline oneline < tmpfileerr)
			wholeerr = (wholeerr " " oneline)
		d("Error Response: " wholeerr)

		system("rm -f " tmpfile)
		system("rm -f " tmpfileerr)
		retval = ("WGET ERROR: " errno ": " wholeerr)
		return retval
	}
	system("rm -f " tmpfileerr)

	# base directory of the page, used to absolutize relative frame links
	urldir = url;
	sub(/[^\/]+$/, "", urldir)

if (USE_PERL) {
	# perl parser path: read "TAG LINK" lines from extract_links_cmd()
	cmd = extract_links_cmd(tmpfile)
	while (cmd | getline) {
		tag = $1
		link = substr($0, length(tag) + 2)

		if (tag == "iframe") {
			d("Frame: " link)
			if (url !~ /\//) {
				link = (urldir link)
				d("Frame->: " link)
			}

			if (link_seen(link)) {
				continue
			}
			# recurse into the frame document and collect its links too
			retval = (retval " " get_links(link))
		}

		if (link_seen(link)) {
			continue
		}

		retval = (retval " " link)
		d("href(): " link)
	}
	close(cmd)
	system("rm -f " tmpfile)

	d("Returning: [" retval "]")
	return retval
}

	# fallback parser path: slurp the whole page into one string and
	# scan it for <a ...> / <frame ...> tags with match()
	wholeodp = ""
	d("Reading success response...")
	while (getline oneline < tmpfile) {
		wholeodp = (wholeodp " " oneline)
	}
	d("Reponse read done...")
	system("rm -f " tmpfile)

	while (match(wholeodp, /<([aA]|[fF][rR][aA][mM][eE])[ \t][^>]*>/) > 0) {
		d("Processing links...")
		odp = substr(wholeodp,RSTART,RLENGTH);
		wholeodp = substr(wholeodp,RSTART+RLENGTH);

		lowerodp = tolower(odp);
		if (lowerodp ~ /<frame[ \t]/) {
			# <frame src=...>: follow the frame recursively
			sub(/[sS][rR][cC]=[ \t]*/, "src=", odp);
			match(odp, /src="[^"]+"/)
			newurl = substr(odp, RSTART+5, RLENGTH-6)
			d("Frame: " newurl)
			if (newurl !~ /\//) {
				newurl=(urldir newurl)
				d("Frame->: " newurl)
			}

			if (link_seen(newurl)) {
				newurl = ""
				continue
			}

			retval = (retval " " get_links(newurl))
		} else if (lowerodp ~ /href=[ \t]*"[^"]*"/) {
			# double-quoted href="..."
			sub(/[hH][rR][eE][fF]=[ \t]*"/,"href=\"",odp)
			match(odp,/href="[^"]*"/)
			link=substr(odp,RSTART,RLENGTH)
			odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
			link=substr(link,7,length(link)-7)
			link=postfix_link(url, link)

			if (link_seen(link)) {
				link=""
				continue
			}

			# link ends with at least 2 digit version
			mlink = ""
			if (link ~ /^.*\/[v]*[0-9\.]+[0-9]\/$/)
				mlink = get_links(link)

			retval = (retval " " link " " mlink)
			d("href(\"\"): " link)
		} else if (lowerodp ~ /href=[ \t]*'[^']*'/) {
			# single-quoted href='...'
			sub(/[hH][rR][eE][fF]=[ \t]*'/,"href='",odp)
			match(odp,/href='[^']*'/)
			link=substr(odp,RSTART,RLENGTH)
			odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
			link=substr(link,7,length(link)-7)
			link=postfix_link(url, link)

			if (link_seen(link)) {
				link=""
				continue
			}

			retval = (retval " " link)
			d("href(''): " link)
		} else if (lowerodp ~ /href=[ \t]*[^ \t>]*/) {
			# bare unquoted href=...
			sub(/[hH][rR][eE][fF]=[ \t]*/,"href=",odp)
			match(odp,/href=[^ \t>]*/)
			link=substr(odp,RSTART,RLENGTH)
			odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
			link=substr(link,6,length(link)-5)

			if (link_seen(link)) {
				link=""
				continue
			}

			retval = (retval " " link)
			d("href(): " link)
		} else {
			# <a ...> but not href - skip
			d("skipping <a > without href: " odp)
		}
	}

	d("Returning: [" retval "]")
	return retval
}
436
# Expand RPM-style macros (%{name} and %name) in `var` using the
# definitions table `defs`.  Keeps substituting until either nothing
# macro-like remains, or a full pass makes no progress (unknown macro)
# -- in which case the partially expanded string is returned so the
# caller can report the impossible substitution.
function subst_defines(var,defs) {
	while ((var ~ /%{.*}/) || (var ~ /%[A-Za-z0-9_]+/)) {
		oldvar = var
		for (j in defs) {
			gsub("%{" j "}", defs[j], var)
			gsub("%" j, defs[j], var)
		}
		if (var == oldvar) {
			# no substitution matched this round: dump the known
			# macros when debugging, then stop (avoids an endless loop)
			if (DEBUG) {
				for (i in defs) {
					d(i " == " defs[i])
				}
			}
			break
		}
	}
	return var
}
456
# Map `url` onto a preferred local mirror using the optional "mirrors"
# file in the current directory (one "origin|mirror|name" entry per
# line).  Returns the rewritten URL on a prefix match, otherwise the
# original URL.
# BUGFIX: the no-match path never closed "mirrors", so every later
# call resumed at EOF and silently stopped applying mirrors; the file
# is now closed on all read paths.  Scratch variables are declared as
# extra parameters to keep them function-local.
function find_mirror(url, succ,line,nf,fields,origin,mirror,mname,prefix) {
	while (succ = (getline line < "mirrors")) {
		if (succ == -1) {
			# "mirrors" file missing or unreadable -- nothing to do
			return url
		}
		nf = split(line, fields, "|")
		if (nf > 1) {
			origin = fields[1]
			mirror = fields[2]
			mname = fields[3]
			prefix = substr(url, 1, length(origin))
			if (prefix == origin) {
				d("Mirror found at " mname)
				close("mirrors")
				return mirror substr(url, length(origin)+1)
			}
		}
	}
	close("mirrors")
	return url
}
477
# fetches file list, and compares version numbers
#
# Given one SourceN entry: split its URL, build a regexp matching the
# file name with the version part wildcarded, fetch the directory
# listing (via get_links) and compare every candidate file's version
# against the current one.  Prints one status line per source:
# "seems ok", "[OLD] x [NEW] y", or a download error.
# BUGFIX: the archive-extension-stripping regexps were written with
# "\." -- an undefined escape in awk string literals that gawk
# degrades to the match-anything "." -- now escaped as "\\.".
function process_source(number, lurl, name, version) {
	d("Processing " lurl)

	if (index(lurl, version) == 0) {
		d("There is no version number ["version"] in ["lurl"]")
		return 0
	}

	# split the URL into access method / host / directory / file name
	# by inserting ":" separators and splitting on them
	sub("://",":",lurl)
	sub("/",":/",lurl)
	gsub("[^/]*$",":&",lurl)
	split(lurl,url,":")
	acc=url[1]
	host=url[2]
	dir=url[3]
	filename=url[4]

	if (index(dir,version)) {
		# directory name as version maching mode:
		# if /something/version/name-version.tarball then check
		# in /something/ looking for newer directory
		dir=substr(dir,1,index(dir,version)-1)
		sub("[^/]*$","",dir)
		sub("(\\.tar\\.(bz|bz2|gz|lzma|xz)|zip)$","",filename)
	}

	d("Will check a directory: " dir)
	d("and a file: " filename)

	# build a regexp that matches the file name with any version string
	filenameexp=filename
	gsub("[+]","\\+",filenameexp)
	sub(version,"[A-Za-z0-9.]+",filenameexp)
	gsub("[.]","\\.",filenameexp)
	d("Expression: " filenameexp)
	match(filename,version)
	prever=substr(filename,1,RSTART-1)
	postver=substr(filename,RSTART+RLENGTH)
	d("Before number: " prever)
	d("and after: " postver)
	newurl=find_mirror(acc "://" host dir)
	d("Looking at " newurl)

	references=0
	finished=0
	oldversion=version
	odp = get_links(newurl, filename)
	if( odp ~ "ERROR: ") {
		print name "(" number ") " odp
	} else {
		d("WebPage downloaded")
		c=split(odp,linki)
		for (nr=1; nr<=c; nr++) {
			addr=linki[nr]

			d("Found link: " addr)

			# github has very different tarball links that clash with this safe check
			if (!(newurl ~/^(http|https):\/\/github.com\/.*\/tarball/)) {
				# skip links where the match is preceded by a word
				# character (would be a different package name)
				if (addr ~ "[-_.0-9A-Za-z~]" filenameexp) {
					continue
				}
			}

			if (addr ~ filenameexp) {
				match(addr,filenameexp)
				newfilename=substr(addr,RSTART,RLENGTH)
				d("Hypothetical new: " newfilename)
				newfilename=fixedsub(prever,"",newfilename)
				newfilename=fixedsub(postver,"",newfilename)
				d("Version: " newfilename)
				# ignore signatures and binary artifacts
				if (newfilename ~ /\.(asc|sig|pkg|bin|binary|built)$/) continue
				# strip ending (happens when in directiory name as version matching mode)
				sub("(\\.tar\\.(bz|bz2|gz|lzma|xz)|zip)$","",newfilename)
				if (NUMERIC) {
					if ( compare_ver_dec(version, newfilename)==1 ) {
						d("Yes, there is new one")
						version=newfilename
						finished=1
					}
				} else if ( compare_ver(version, newfilename)==1 ) {
					d("Yes, there is new one")
					version=newfilename
					finished=1
				}
			}
		}
		if (finished == 0)
			print name "(" number ") seems ok: " oldversion
		else
			print name "(" number ") [OLD] " oldversion " [NEW] " version
	}
}
574
# Fetch an RSS feed and extract the newest version from its <title>
# elements; `regex` must contain exactly one capture group holding the
# version.  Returns the captured version string (empty on failure).
# BUGFIX: sed needs the literal "<\/title>" here because "/" is its
# s/// delimiter; in an awk string that backslash must itself be
# escaped ("<\\/title>"), since "\/" is an undefined escape that gawk
# collapses to a bare "/" -- which prematurely terminated the sed
# expression.
function rss_upgrade(name, ver, url, regex, cmd) {
	regex = "s/.*<title>" regex "<\\/title>.*/\\1/p"
	cmd = "wget -t 2 -T 45 -q -O - " url " | sed -nre '" regex "' | head -n1"

	d("rss_upgrade_cmd: " cmd)
	cmd | getline ver
	close(cmd)

	return ver
}
585
# check for ZF upgrade from rss
# Thin wrapper: asks rss_upgrade() to scan the Zend Framework news
# feed for "Zend Framework <version> Released" titles.
function zf_upgrade(name, ver) {
	return rss_upgrade(name, ver,
		"http://devzone.zend.com/tag/Zend_Framework_Management/format/rss2.0",
		"Zend Framework ([^\\s]+) Released")
}
593
# Thin wrapper: asks rss_upgrade() to scan the Hudson project news
# feed for "Hudson <version> released" titles.
function hudson_upgrade(name, ver) {
	return rss_upgrade(name, ver,
		"https://hudson.dev.java.net/servlets/ProjectRSS?type=news",
		"Hudson ([0-9.]+) released")
}
600
# upgrade check for pear package using PEAR CLI
# Strips our php-pear- prefix to get the upstream channel name, then
# asks `pear remote-info` for the latest released version.
function pear_upgrade(name, ver, cmd) {
	sub(/^php-pear-/, "", name);

	cmd = "pear remote-info " name " | awk '/^Latest/{print $NF}'"
	d("PEAR: " cmd)
	cmd | getline ver
	close(cmd)
	return ver
}
612
# Determine the newest vim patchlevel by listing the MD5SUMS file of
# the patch directory for the current base version (DEFS["ver"]).
function vim_upgrade(name, ver, cmd) {
	# %patchset_source -f ftp://ftp.vim.org/pub/editors/vim/patches/7.2/7.2.%03g 1 %{patchlevel}
	cmd = "wget -q -O - ftp://ftp.vim.org/pub/editors/vim/patches/" DEFS["ver"] "/MD5SUMS|grep -vF .gz|tail -n1|awk '{print $2}'"
	d("VIM: " cmd)
	cmd | getline ver
	close(cmd)

	return ver
}
621
# Query npm for the latest published version of a nodejs package.
# A %define pkg in the spec (DEFS["pkg"]) overrides the package name.
function nodejs_upgrade(name, ver, cmd) {
	d("NODEJS " name " (as " DEFS["pkg"] ") " ver);
	cmd = "npm info " (DEFS["pkg"] ? DEFS["pkg"] : name) " dist-tags.latest"
	cmd | getline ver
	close(cmd)

	return ver
}
634
# Query Google's rpm repository metadata for the current
# google-chrome version; DEFS["state"] selects the channel suffix
# (e.g. "stable").  Returns the version string (empty on failure).
# BUGFIX: the perl regexp was written with "[\d.]" -- "\d" is an
# undefined escape in awk string literals that gawk collapses to a
# plain "d", so perl received the broken class "[d.]"; "\\d" passes a
# literal \d through to perl.
function chrome_upgrade(name, ver, cmd, sourceurl) {
	sourceurl = "http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/primary.xml.gz"
	cmd = "curl -s " sourceurl " | zcat | perl -ne 'm{<name>google-chrome-" DEFS["state"] "</name>} and m{<version .*ver=.([\\d.]+)} and print $1'"
	d("CHROME " cmd);
	cmd | getline ver
	close(cmd)

	return ver
}
644
# Check one parsed spec file for upgrades.  A handful of packages are
# special-cased to query their upstream service directly; when such a
# check yields a version it is compared and reported immediately.
# Everything else falls through to scanning the download directory of
# each fully-expanded Source URL via process_source().
function process_data(name, ver, rel, src, nver) {
	if (name ~ /^php-pear-/)
		nver = pear_upgrade(name, ver)
	else if (name == "ZendFramework")
		nver = zf_upgrade(name, ver)
	else if (name == "hudson")
		nver = hudson_upgrade(name, ver)
	else if (name == "vim")
		nver = vim_upgrade(name, ver)
	else if (name == "google-chrome")
		nver = chrome_upgrade(name, ver)
	else if (name ~ "^nodejs-")
		nver = nodejs_upgrade(name, ver)

	if (nver) {
		if (compare_ver(ver, nver))
			print name " [OLD] " ver " [NEW] " nver
		else
			print name " seems ok: " ver
		return
	}

	# xulrunner tracks the firefox version
	if (name == "xulrunner") {
		ver = subst_defines(DEFS["firefox_ver"], DEFS)
		d("package xulrunner, change version to firefox [" ver "]")
	}

	# process each Source URL whose macros could be fully expanded;
	# anything still containing %... is reported as impossible
	for (i in src) {
		if (src[i] ~ /%{nil}/)
			gsub(/\%\{nil\}/, "", src[i])
		if (src[i] !~ /%{.*}/ && src[i] !~ /%[A-Za-z0-9_]/) {
			d("Source: " src[i])
			process_source(i, src[i], name, ver)
		} else {
			print FNAME ":" i ": impossible substitution: " src[i]
		}
	}
}
688
BEGIN {
	# if you want to use DEBUG, run script with "-v DEBUG=1"
	# or uncomment the line below
	# DEBUG = 1

	# wget is required for everything else; exit code 3 (local I/O
	# error from --help redirection quirks) is tolerated
	errno = system("wget --help > /dev/null 2>&1")
	if (errno && errno != 3) {
		print "No wget installed!"
		exit 1
	}

	# accept "-n" as the second argument: switch to decimal-float
	# (NUMERIC) version comparison, then shift the remaining args down
	if (ARGC >= 3 && ARGV[2] == "-n") {
		NUMERIC = 1
		for (i = 3; i < ARGC; i++)
			ARGV[i-1] = ARGV[i]
		ARGC--
	}
}
705
# First line of each input spec file: flush the results for the
# previous file (if any) and reset all per-file state.
FNR == 1 {
	if (ARGIND != 1) {
		# clean frameseen for each ARG
		for (i in frameseen)
			delete frameseen[i]
		frameseen[0] = 1

		process_data(NAME, VER, REL, SRC)
		NAME = ""; VER = ""; REL = ""
		for (i in DEFS)
			delete DEFS[i]
		for (i in SRC)
			delete SRC[i]
	}
	FNAME = FILENAME
	# built-in macro values every spec can rely on
	DEFS["_alt_kernel"] = ""
	DEFS["20"] = "\\ "
	DEFS["nil"] = ""
}
724
# Capture the first URL:/Name:/Version:/Release: header of the spec
# (any capitalization of the first letter), expanding macros as we go.
/^[Uu][Rr][Ll]:/&&(URL=="") { URL=subst_defines($2,DEFS) ; DEFS["url"]=URL }
/^[Nn]ame:/&&(NAME=="") { NAME=subst_defines($2,DEFS) ; DEFS["name"]=NAME }
/^[Vv]ersion:/&&(VER=="") { VER=subst_defines($2,DEFS) ; DEFS["version"]=VER }
/^[Rr]elease:/&&(REL=="") { REL=subst_defines($2,DEFS) ; DEFS["release"]=REL }
# Remember every remote (ftp/http/https) SourceN URL, keyed by its line number.
/^[Ss]ource[0-9]*:/ { if (/(ftp|http|https):\/\//) SRC[FNR]=subst_defines($2,DEFS) }
# Record %define macros for later substitution.
/%define/ { DEFS[$2]=subst_defines($3,DEFS) }
731
END {
	# flush the results for the last (or only) spec file read
	process_data(NAME, VER, REL, SRC)
}
# (gitweb page footer removed: "This page took 0.047861 seconds and 4 git commands to generate.")