#!/bin/awk -f
#
# Copyright (C) 2000-2013 PLD-Team <feedback@pld-linux.org>
# Authors:
# Sebastian Zagrodzki <zagrodzki@pld-linux.org>
# Jacek Konieczny <jajcus@pld-linux.org>
# Andrzej Krzysztofowicz <ankry@pld-linux.org>
# Jakub Bogusz <qboosh@pld-linux.org>
# Elan Ruusamäe <glen@pld-linux.org>
#
# See the cvs log of pldnotify.awk for the list of contributors
#
# TODO:
# - "SourceXDownload" support (use given URLs if present instead of cut-down SourceX URLs)
# - "SourceXActiveFTP" support
# - support debian/watch http://wiki.debian.org/debian/watch/

# NOTE:
# to test this, run:
# $ awk -vDEBUG=1 -f pldnotify.awk < specfile
#
# To get the most out of it, you need to have the following tools installed:
# - perl, sed, wget, coreutils, util-linux
# - perl-HTML-Tree (HTML::TreeBuilder module) for better link parsing (-vUSE_PERL=0 to disable)
# - pear (php-pear-PEAR) for php-pear package updates
# - npm for nodejs packages
# - gem (ruby-rubygems) for ruby/rubygem packages
#
# Additionally, a "mirrors" file in the current dir controls which local mirrors you prefer

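# Typical invocation (illustrative; "foo.spec" is a made-up file name):
# $ ./pldnotify.awk foo.spec
# $ ./pldnotify.awk foo.spec -n
#   ("-n", given as the second argument, switches to decimal version
#    comparison, see compare_ver_dec() and the NUMERIC flag in BEGIN)
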
function d(s) {
	if (!DEBUG) {
		return
	}

#	print strftime("%Y-%m-%d %H:%M:%S ") s >> "/dev/stderr"
	print s >> "/dev/stderr"
}

function fixedsub(s1,s2,t, ind) {
# substitutes fixed strings (not regexps)
	if (ind = index(t,s1)) {
		t = substr(t, 1, ind-1) s2 substr(t, ind+length(s1))
	}
	return t
}
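
# Example (informal): fixedsub() replaces the first occurrence of a fixed
# string, so regexp metacharacters in s1 are not special:
#   fixedsub("1.2", "", "foo-1.2.tar.gz") -> "foo-.tar.gz"
#   fixedsub("a+b", "X", "a+b.c")         -> "X.c"  (a regexp sub(/a+b/,...) would not match here)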

function ispre(s) {
	if ((s~"pre")||(s~"PRE")||(s~"beta")||(s~"BETA")||(s~"alpha")||(s~"ALPHA")||(s~"rc")||(s~"RC")) {
		d("pre-version")
		return 1
	} else {
		return 0
	}
}

function compare_ver(v1,v2) {
# compares version numbers
	while (match(v1,/[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
		v1=(substr(v1,1,RSTART) "." substr(v1,RSTART+RLENGTH-1))
	while (match(v2,/[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
		v2=(substr(v2,1,RSTART) "." substr(v2,RSTART+RLENGTH-1))
	sub("^0*","",v1)
	sub("^0*","",v2)
	gsub("\\.0*",".",v1)
	gsub("\\.0*",".",v2)
	d("v1 == " v1)
	d("v2 == " v2)
	count=split(v1,v1a,"\\.")
	count2=split(v2,v2a,"\\.")

	if (count<count2) mincount=count
	else mincount=count2

	for (i=1; i<=mincount; i++) {
		if (v1a[i]=="") v1a[i]=0
		if (v2a[i]=="") v2a[i]=0
		d("i == " i)
		d("v1[i] == " v1a[i])
		d("v2[i] == " v2a[i])
		if ((v1a[i]~/[0-9]/)&&(v2a[i]~/[0-9]/)) {
			if (length(v2a[i])>length(v1a[i]))
				return 1
			else if (v2a[i]>v1a[i])
				return 1
			else if (length(v1a[i])>length(v2a[i]))
				return 0
			else if (v1a[i]>v2a[i])
				return 0
		} else if ((v1a[i]~/[A-Za-z]/)&&(v2a[i]~/[A-Za-z]/)) {
			if (v2a[i]>v1a[i])
				return 1
			else if (v1a[i]>v2a[i])
				return 0
		} else if (ispre(v1a[i]) == 1)
			return 1
		else
			return 0
	}
	if ((count2==mincount)&&(count!=count2)) {
		for (i=count2+1; i<=count; i++)
			if (ispre(v1a[i]) == 1)
				return 1
		return 0
	} else if (count!=count2) {
		for (i=count+1; i<=count2; i++)
			if (ispre(v2a[i]) == 1)
				return 0
		return 1
	}
	return 0
}
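
# Worked example (informal): compare_ver(old, new) returns 1 when "new" looks
# newer than "old", i.e. an upgrade is available:
#   compare_ver("1.2.9", "1.2.10") -> 1   (component-wise compare)
#   compare_ver("1.2.10", "1.2.9") -> 0
#   compare_ver("1.2", "1.2.rc1")  -> 0   (extra "rc" component is treated as a pre-release)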

function compare_ver_dec(v1,v2) {
# compares version numbers as decimal floats
	while (match(v1,/[0-9][a-zA-Z]/))
		v1=(substr(v1,1,RSTART) "." substr(v1,RSTART+RLENGTH-1))
	while (match(v2,/[0-9][a-zA-Z]/))
		v2=(substr(v2,1,RSTART) "." substr(v2,RSTART+RLENGTH-1))
	sub("^0*","",v1)
	sub("^0*","",v2)
	d("v1 == " v1)
	d("v2 == " v2)
	count=split(v1,v1a,"\\.")
	count2=split(v2,v2a,"\\.")

	if (count<count2) mincount=count
	else mincount=count2

	for (i=1; i<=mincount; i++) {
		if (v1a[i]=="") v1a[i]=0
		if (v2a[i]=="") v2a[i]=0
		d("i == " i)
		d("v1[i] == " v1a[i])
		d("v2[i] == " v2a[i])
		if ((v1a[i]~/[0-9]/)&&(v2a[i]~/[0-9]/)) {
			if (i==2) {
				if (0+("." v2a[i])>0+("." v1a[i]))
					return 1
				else if (0+("." v1a[i])>0+("." v2a[i]))
					return 0
			} else {
				if (length(v2a[i])>length(v1a[i]))
					return 1
				else if (v2a[i]>v1a[i])
					return 1
				else if (length(v1a[i])>length(v2a[i]))
					return 0
				else if (v1a[i]>v2a[i])
					return 0
			}
		} else if ((v1a[i]~/[A-Za-z]/)&&(v2a[i]~/[A-Za-z]/)) {
			if (v2a[i]>v1a[i])
				return 1
			else if (v1a[i]>v2a[i])
				return 0
		} else if (ispre(v1a[i]) == 1)
			return 1
		else
			return 0
	}
	if ((count2==mincount)&&(count!=count2)) {
		for (i=count2+1; i<=count; i++)
			if (ispre(v1a[i]) == 1)
				return 1
		return 0
	} else if (count!=count2) {
		for (i=count+1; i<=count2; i++)
			if (ispre(v2a[i]) == 1)
				return 0
		return 1
	}
	return 0
}
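
# Worked example (informal): compare_ver_dec() treats the part after the first
# dot as a decimal fraction; it is used instead of compare_ver() when the "-n"
# switch sets NUMERIC=1:
#   compare_ver("1.9", "1.10")     -> 1   (1.10 considered newer)
#   compare_ver_dec("1.9", "1.10") -> 0   (0.10 < 0.9 as decimal fractions)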

function link_seen(link) {
	for (seenlink in frameseen) {
		if (seenlink == link) {
			d("Link: [" link "] seen already, skipping...")
			return 1
		}
	}
	frameseen[link]=1
	return 0
}

function mktemp( _cmd, _tmpfile) {
	_cmd = "mktemp /tmp/XXXXXX"
	_cmd | getline _tmpfile
	close(_cmd)
	return _tmpfile
}

# fix link to an artificial one that will be recognized by the rest of this script
function postfix_link(url, link, oldlink) {
	oldlink = link
	if ((url ~/^(http|https):\/\/github.com\//) && (link ~ /.*\/tarball\//)) {
		gsub(".*\/tarball\/", "", link)
		link = link ".tar.gz"
	}
	if (oldlink != link) {
		d("POST FIXED URL [ " oldlink " ] to [ " link " ]")
	}
	return link
}
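
# Example (informal, made-up project/tag): a github "tarball" link is turned
# into a fake tarball file name so that a version can be extracted from it:
#   postfix_link("https://github.com/foo/bar/tarball/", "foo/bar/tarball/v1.2.3")
#     -> "v1.2.3.tar.gz"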

# use perl HTML::TreeBuilder module to extract links from html
# it prints "TAGNAME LINK" pairs, which is pretty straightforward to parse in awk
function extract_links_cmd(tmpfile) {
	return "perl -MHTML::TreeBuilder -e ' \
		my $content = join q//, <>; \
		my $root = new HTML::TreeBuilder; \
		$root->parse($content); \
		\
		my %links = (); \
		for (@{$root->extract_links(qw(a iframe))}) { \
			my($link, $element, $attr, $tag) = @$_; \
			$links{$link} = $tag; \
		} \
		\
		while (my($link, $tag) = each %links) { \
			print $tag, q/ /, $link, $/; \
		} \
	' " tmpfile
}
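
# The helper prints one "TAG LINK" pair per line, e.g. (made-up output):
#   a http://example.org/dist/foo-1.3.tar.gz
#   iframe http://example.org/frame.html
# get_links() below reads the tag from $1 and takes the rest of the line as the link.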

# get all <A HREF=..> tags from specified URL
function get_links(url,filename, errno,link,oneline,retval,odp,wholeodp,lowerodp,tmpfile,cmd) {

	wholeerr=""

	tmpfile = mktemp()
	tmpfileerr = mktemp()

	if (url ~ /^http:\/\/(download|downloads|dl)\.(sf|sourceforge)\.net\//) {
		newurl = url
		# http://dl.sourceforge.net/threestore/
		# http://downloads.sourceforge.net/project/mediainfo/source/mediainfo/
		gsub("^http://(download|downloads|dl)\\.(sf|sourceforge)\\.net/", "", newurl)
		gsub("^project/", "", newurl)
		gsub("/.*", "", newurl)
		url = "http://sourceforge.net/projects/" newurl "/rss?path=/"
		d("sf url, mangled url to: " url)

	} else if (url ~ /^http:\/\/(.*)\.googlecode\.com\/files\//) {
		gsub("^http://", "", url)
		gsub("\\..*", "", url)
		url = "http://code.google.com/p/" url "/downloads/list"
		d("googlecode url, mangled url to: " url)

	} else if (url ~ /^http:\/\/pecl.php.net\/get\//) {
		gsub("-.*", "", filename)
		url = "http://pecl.php.net/package/" filename
		d("pecl.php.net url, mangled url to: " url)

	} else if (url ~/http:\/\/cdn.mysql.com\//) {
		gsub("http:\/\/cdn.mysql.com\/", "", url)
		url = "http://vesta.informatik.rwth-aachen.de/mysql/" url
		d("mysql CDN, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/launchpad\.net\/(.*)\//) {
		gsub("^(http|https):\/\/launchpad\.net\/", "", url)
		gsub("\/.*/", "", url)
		url = "https://code.launchpad.net/" url "/+download"
		d("main launchpad url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/edge\.launchpad\.net\/(.*)\//) {
		gsub("^(http|https):\/\/edge\.launchpad\.net\/", "", url)
		gsub("\/.*/", "", url)
		url = "https://edge.launchpad.net/" url "/+download"
		d("edge launchpad url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/github.com\/.*\/(.*)\/tarball\//) {
		gsub("\/tarball\/.*", "/downloads", url)
		d("github tarball url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/github.com\/.*\/(.*)\/archive\//) {
		gsub("\/archive\/.*", "/tags", url)
		d("github archive url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/bitbucket.org\/.*\/get\/.*/) {
		# https://bitbucket.org/logilab/pylint/get/tip.tar.bz2 -> https://bitbucket.org/logilab/pylint/downloads
		gsub("\/get\/.*", "/downloads", url)
		d("bitbucket url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/cgit\..*\/(.*)\/snapshot\//) {
		gsub("\/snapshot\/.*", "/", url)
		d("cgit snapshot tarball url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/www2\.aquamaniac\.de\/sites\/download\//) {
		url = "http://www2.aquamaniac.de/sites/download/packages.php"
		d("aquamaniac.de tarball url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/www.process-one.net\/downloads\/ejabberd\//) {
		url = "http://www.process-one.net/en/ejabberd/archive/"
		d("ejabberd tarball url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/llvm.org\/releases\//) {
		url = "http://llvm.org/releases/download.html"
		d("llvm tarball url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/download\.owncloud\.org\/community\//) {
		url = "http://owncloud.org/changelog/"
		d("owncloud tarball url, mangled url to: " url)

	} else if (url ~ /^(http|https):\/\/hackage\.haskell\.org\/packages\/archive\//) {
		gsub("\/packages\/archive","/package",url)
		d("hackage haskell tarball url, mangled url to: " url)

	} else if (url ~ /^http:\/\/www.taskwarrior.org\/download\//) {
		url = "http://taskwarrior.org/projects/taskwarrior/wiki/Download"
		d("taskwarrior tarball url, mangled url to: " url)
	} else if (url ~/^http:\/\/www.rarlab.com\/rar\// && filename ~ /^unrarsrc/) {
		url = "http://www.rarlab.com/rar_add.htm"
		d("unrar tarball url, mangled url to: " url)
	} else if (url ~/^http:\/\/www.rarlab.com\/rar\//) {
		url = "http://www.rarlab.com/download.htm"
		d("rar tarball url, mangled url to: " url)
	} else if (url ~/^(http|https):\/\/pypi.python.org\/packages\/source\/.*/) {
		gsub("/packages/source/[a-zA-Z0-9]/", "/pypi/", url)
		d("pypi.python.org url, mangled url to: " url)
	}

	d("Retrieving: " url)
	user_agent = "Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100129 PLD/3.0 (Th) Iceweasel/3.6"
	cmd = "wget -t 2 -T 45 --user-agent \"" user_agent "\" -nv -O - \"" url "\" --passive-ftp --no-check-certificate > " tmpfile " 2> " tmpfileerr
	d("Execute: " cmd)
	errno = system(cmd)
	d("Execute done")

	if (errno != 0) {
		d("Reading failure response...")
		wholeerr = ""
		while (getline oneline < tmpfileerr)
			wholeerr = (wholeerr " " oneline)
		d("Error Response: " wholeerr)

		system("rm -f " tmpfile)
		system("rm -f " tmpfileerr)
		retval = ("WGET ERROR: " errno ": " wholeerr)
		return retval
	}
	system("rm -f " tmpfileerr)

	urldir = url;
	sub(/[^\/]+$/, "", urldir)

	if (USE_PERL) {
		cmd = extract_links_cmd(tmpfile)
		while (cmd | getline) {
			tag = $1
			link = substr($0, length(tag) + 2)

			if (tag == "iframe") {
				d("Frame: " link)
				if (link !~ /\//) {
					link = (urldir link)
					d("Frame->: " link)
				}

				if (link_seen(link)) {
					continue
				}
				retval = (retval " " get_links(link))
			}

			if (link_seen(link)) {
				continue
			}

			retval = (retval " " link)
			d("href(): " link)
		}
		close(cmd)
	}

	wholeodp = ""
	d("Reading success response...")
	while (getline oneline < tmpfile) {
		wholeodp = (wholeodp " " oneline)
#		d("Response: " wholeodp)
	}
	d("Response read done...")
	system("rm -f " tmpfile)

	# MATCH one of these:
	#while (match(wholeodp, /<([aA]|[fF][rR][aA][mM][eE])[ \t][^>]*>/) > 0) {
	#while (match(wholeodp, /<link>[^<]*<\/link>/) > 0) {

	while (match(wholeodp, /(<link>[^<]*<\/link>|<([aA]|[fF][rR][aA][mM][eE])[ \t][^>]*>)/) > 0) {
		d("Processing links...")
		odp = substr(wholeodp,RSTART,RLENGTH);
		wholeodp = substr(wholeodp,RSTART+RLENGTH);

		lowerodp = tolower(odp);
		if (lowerodp ~ /<frame[ \t]/) {
			sub(/[sS][rR][cC]=[ \t]*/, "src=", odp);
			match(odp, /src="[^"]+"/)
			newurl = substr(odp, RSTART+5, RLENGTH-6)
			d("Frame: " newurl)
			if (newurl !~ /\//) {
				newurl=(urldir newurl)
				d("Frame->: " newurl)
			}

			if (link_seen(newurl)) {
				newurl = ""
				continue
			}

			retval = (retval " " get_links(newurl))
			d("href('condition1'): " newurl)
		} else if (lowerodp ~ /href=[ \t]*"[^"]*"/) {
			sub(/[hH][rR][eE][fF]=[ \t]*"/,"href=\"",odp)
			match(odp,/href="[^"]*"/)
			link=substr(odp,RSTART,RLENGTH)
			odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
			link=substr(link,7,length(link)-7)
			link=postfix_link(url, link)

			if (link_seen(link)) {
				link=""
				continue
			}

			# link ends with at least 2 digit version
			mlink = ""
			if (link ~ /^.*\/[v]*[0-9\.]+[0-9]\/$/)
				mlink = get_links(link)

			retval = (retval " " link " " mlink)
			d("href('condition2'): " link)
		} else if (lowerodp ~ /href=[ \t]*'[^']*'/) {
			sub(/[hH][rR][eE][fF]=[ \t]*'/,"href='",odp)
			match(odp,/href='[^']*'/)
			link=substr(odp,RSTART,RLENGTH)
			odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
			link=substr(link,7,length(link)-7)
			link=postfix_link(url, link)

			if (link_seen(link)) {
				link=""
				continue
			}

			retval = (retval " " link)
			d("href('condition3'): " link)
		} else if (lowerodp ~ /href=[ \t]*[^ \t>]*/) {
			sub(/[hH][rR][eE][fF]=[ \t]*/,"href=",odp)
			match(odp,/href=[^ \t>]*/)
			link=substr(odp,RSTART,RLENGTH)
			odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
			link=substr(link,6,length(link)-5)

			if (link_seen(link)) {
				link=""
				continue
			}

			retval = (retval " " link)
			d("href('condition4'): " link)
		} else if (lowerodp ~ /<link>/) {
			# <link>...</link> element, e.g. from an RSS feed
			link=lowerodp
			sub(/<link>/, "", link)
			sub(/\/download<\/link>$/, "", link)
			sub(/<\/link>$/, "", link)

			if (link_seen(link)) {
				link=""
				continue
			}

			retval = (retval " " link)
			d("href('condition5'): " link)
		} else {
			# <a ...> but not href - skip
			d("skipping <a > without href: " odp)
		}
	}

	d("Returning: [" retval "]")
	return retval
}
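
# Example (informal): get_links() returns all candidate links as one
# space-separated string.  For known hosting sites the URL is rewritten first,
# e.g. "http://downloads.sourceforge.net/project/foo/..." ("foo" is a made-up
# project) is fetched via "http://sourceforge.net/projects/foo/rss?path=/".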

function subst_defines(var,defs) {
# substitute all possible RPM macros
	while ((var ~ /%{.*}/) || (var ~ /%[A-Za-z0-9_]+/)) {
		oldvar=var
		for (j in defs) {
			gsub("%{" j "}", defs[j], var)
			gsub("%" j , defs[j], var)
			# conditional macros like %{?patchlevel:.5} - drop these for now
			gsub("%{\\?" j ":[^}]*}", "", var)
		}
		if (var==oldvar) {
			if (DEBUG) {
				for (i in defs) {
					d(i " == " defs[i])
				}
			}
			return var
		}
	}
	return var
}
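
# Example (informal): with DEFS["name"]="foo" and DEFS["version"]="1.2",
#   subst_defines("%{name}-%{version}.tar.gz", DEFS) -> "foo-1.2.tar.gz"
# Macros that cannot be expanded are returned unchanged and reported later
# as "impossible substitution".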

function find_mirror(url) {

	while (succ = (getline line < "mirrors")) {
		if (succ==-1) { return url }
		nf=split(line,fields,"|")
		if (nf>1){
			origin=fields[1]
			mirror=fields[2]
			mname=fields[3]
			prefix=substr(url,1,length(origin))
			if (prefix==origin){
				d("Mirror found at " mname)
				close("mirrors")
				return mirror substr(url,length(origin)+1)
			}
		}
	}

	return url
}
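
# The optional "mirrors" file holds one "origin|mirror|name" entry per line,
# e.g. (made-up entry):
#   ftp://ftp.example.org/pub/|ftp://mirror.local/pub/|local mirror
# URLs starting with the origin prefix are rewritten to the mirror before fetching.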

# fetches file list and compares version numbers
function process_source(number, lurl, name, version) {
	d("Processing " lurl)

	if (index(lurl, version) == 0) {
		d("There is no version number ["version"] in ["lurl"]")
		return 0
	}

	sub("://",":",lurl)
	sub("/",":/",lurl)
	gsub("[^/]*$",":&",lurl)
	split(lurl,url,":")
	acc=url[1]
	host=url[2]
	dir=url[3]
	filename=url[4]

	if (index(dir,version)) {
		# directory name as version matching mode:
		# if /something/version/name-version.tarball then check
		# in /something/ looking for a newer directory
		dir=substr(dir,1,index(dir,version)-1)
		sub("[^/]*$","",dir)
		sub("(\\.tar\\.(bz|bz2|gz|lzma|xz)|zip)$","",filename)
	}

	d("Will check a directory: " dir)
	d("and a file: " filename)

	filenameexp=filename
	gsub("[+]","\\+",filenameexp)
	sub(version,"[A-Za-z0-9.]+",filenameexp)
	gsub("[.]","\\.",filenameexp)
	sub("\\.(bz|bz2|gz|lzma|xz|zip)$",".(bz|bz2|gz|lzma|xz|zip)",filenameexp)
	d("Expression: " filenameexp)
	match(filename,version)
	prever=substr(filename,1,RSTART-1)
	postver=substr(filename,RSTART+RLENGTH)
	d("Before number: " prever)
	d("and after: " postver)
	newurl=find_mirror(acc "://" host dir)
	#print acc "://" host dir
	#newurl=url[1]"://"url[2]url[3]url[4]
	#newurl=acc "://" host dir filename
	d("Looking at " newurl)

	references=0
	finished=0
	oldversion=version
	odp = get_links(newurl, filename)
	if (odp ~ "ERROR: ") {
		print name "(" number ") " odp
	} else {
		d("WebPage downloaded")
		c=split(odp,linki)
		for (nr=1; nr<=c; nr++) {
			addr=linki[nr]

			d("Found link: " addr)

			# Try not to treat foobar or foo-bar as a (possibly newer) version of bar
			# (practical cases: KXL, lineakconfig, mhash...)
			# but don't skip cases where the name is like "/some/link/0.12.2.tar.gz"
			if ((addr ~ "[-_.0-9A-Za-z~]" filenameexp) && addr !~ "[-_.0-9A-Za-z~]/" filenameexp) {
				continue
			}

			if (addr ~ filenameexp) {
				match(addr,filenameexp)
				newfilename=substr(addr,RSTART,RLENGTH)
				d("Hypothetical new: " newfilename)
				newfilename=fixedsub(prever,"",newfilename)
				newfilename=fixedsub(postver,"",newfilename)
				d("Version: " newfilename)
				if (newfilename ~ /\.(asc|sig|pkg|bin|binary|built)$/) continue
				# strip ending (happens in directory-name-as-version matching mode)
				sub("(\\.tar\\.(bz|bz2|gz|lzma|xz)|zip)$","",newfilename)
				if (NUMERIC) {
					if ( compare_ver_dec(version, newfilename)==1 ) {
						d("Yes, there is new one")
						version=newfilename
						finished=1
					}
				} else if ( compare_ver(version, newfilename)==1 ) {
					d("Yes, there is new one")
					version=newfilename
					finished=1
				}
			}
		}
		if (finished == 0)
			print name "(" number ") seems ok: " oldversion
		else
			print name "(" number ") [OLD] " oldversion " [NEW] " version
	}
}
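
# Worked example (informal, made-up URL): for
#   Source0: http://example.org/dist/foo-1.2.tar.gz   (Version: 1.2)
# the URL is split into acc="http", host="example.org", dir="/dist/" and
# filename="foo-1.2.tar.gz"; the listing of http://example.org/dist/ is fetched
# and every link is matched against roughly
#   foo-[A-Za-z0-9.]+\.tar\.(bz|bz2|gz|lzma|xz|zip)
# Any match that compare_ver() considers newer is reported as [NEW].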

function rss_upgrade(name, ver, url, regex, cmd) {
	regex = "s/.*<title>" regex "<\\/title>.*/\\1/p"
	cmd = "wget -t 2 -T 45 -q -O - " url " | sed -nre '" regex "' | head -n1"

	d("rss_upgrade_cmd: " cmd)
	cmd | getline ver
	close(cmd)

	return ver
}
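
# Example (informal): for the Zend Framework check below, the generated
# pipeline is roughly:
#   wget -t 2 -T 45 -q -O - <rss url> | sed -nre 's/.*<title>Zend Framework ([^\s]+) Released<\/title>.*/\1/p' | head -n1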

# check for ZF upgrade from rss
function zf_upgrade(name, ver) {
	return rss_upgrade(name, ver, \
		"http://devzone.zend.com/tag/Zend_Framework_Management/format/rss2.0", \
		"Zend Framework ([^\\s]+) Released" \
	);
}

function hudson_upgrade(name, ver) {
	return rss_upgrade(name, ver, \
		"https://hudson.dev.java.net/servlets/ProjectRSS?type=news", \
		"Hudson ([0-9.]+) released" \
	);
}

# upgrade check for pear package using PEAR CLI
function pear_upgrade(name, ver, cmd) {
	sub(/^php-pear-/, "", name);

	cmd = "pear remote-info " name " | awk '/^Latest/{print $NF}'"
	d("PEAR: " cmd)
	cmd | getline ver
	close(cmd)

	return ver
}

function vim_upgrade(name, ver, cmd) {
	# %patchset_source -f ftp://ftp.vim.org/pub/editors/vim/patches/7.2/7.2.%03g 1 %{patchlevel}
	cmd = "wget -q -O - ftp://ftp.vim.org/pub/editors/vim/patches/" DEFS["ver"] "/MD5SUMS|grep -vF .gz|tail -n1|awk '{print $2}'"
	d("VIM: " cmd)
	cmd | getline ver
	close(cmd)
	return ver
}

function nodejs_upgrade(name, ver, cmd) {
	d("NODEJS " name " (as " DEFS["pkg"] ") " ver);
	if (DEFS["pkg"]) {
		cmd = "npm info " DEFS["pkg"] " dist-tags.latest"
	} else {
		cmd = "npm info " name " dist-tags.latest"
	}
	cmd | getline ver
	close(cmd)

	return ver
}

function rubygem_upgrade(name, ver, cmd, pkg) {
	if (DEFS["gem_name"]) {
		pkg = DEFS["gem_name"];

	} else if (DEFS["gemname"]) {
		pkg = DEFS["gemname"];

	} else if (DEFS["pkgname"]) {
		pkg = DEFS["pkgname"];

	} else {
		pkg = name;
		gsub(/^ruby-/, "", pkg);
	}

	cmd = "gem list --remote '^" pkg "$' | awk '/" pkg "/ {v=$2; sub(/\\(/, \"\", v); sub(/\\)$/, \"\", v); print v}'"
	d("RUBYGEM " name " (as " pkg ") " ver ": " cmd);
	cmd | getline ver

	close(cmd)

	return ver
}
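
# Example (informal, made-up package): for the spec name "ruby-rake", pkg
# becomes "rake" and the generated command is roughly:
#   gem list --remote '^rake$' | awk '/rake/ {v=$2; sub(/\(/, "", v); sub(/\)$/, "", v); print v}'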

function google_linux_repo(name, ver, reponame, cmd, sourceurl) {
	sourceurl = "http://dl.google.com/linux/" reponame "/rpm/stable/x86_64/repodata/primary.xml.gz"
	cmd = "curl -s " sourceurl " | zcat | perl -ne 'm{<name>" name "-" DEFS["state"] "</name>} and m{<version .*ver=.([\\d.]+)} and print $1'"
	d("google repo: " cmd);
	cmd | getline ver
	close(cmd)

	return ver
}

function jenkins_upgrade(name, ver, urls, url, i, c, chunks, nver) {
	for (i in urls) {
		url = urls[i]
		# http://mirrors.jenkins-ci.org/war-stable/1.509.1/jenkins.war?/jenkins-1.509.1.war
		gsub("/" ver "/jenkins.war\\?/jenkins-" ver ".war", "/", url);
		c = split(get_links(url), chunks, "/")
		# new version is the second one from the bottom
		nver = chunks[c - 2]
		gsub(/ /, "", nver)
		return nver;
	}
}

function process_data(name, ver, rel, src, nver, i) {
	if (name ~ /^php-pear-/) {
		nver = pear_upgrade(name, ver);
	} else if (name == "ZendFramework") {
		nver = zf_upgrade(name, ver);
	} else if (name == "hudson") {
		nver = hudson_upgrade(name, ver);
	} else if (name == "vim") {
		nver = vim_upgrade(name, ver);
	} else if (name == "google-chrome") {
		nver = google_linux_repo(name, ver, "chrome");
	} else if (name == "google-talkplugin") {
		nver = google_linux_repo(name, ver, "talkplugin");
	} else if (name ~ "^nodejs-") {
		nver = nodejs_upgrade(name, ver);
	} else if (name ~ "^ruby-" || name == "chef") {
		nver = rubygem_upgrade(name, ver);
	} else if (name ~ "jenkins") {
		nver = jenkins_upgrade(name, ver, src);
	}

	if (nver) {
		if (compare_ver(ver, nver)) {
			print name " [OLD] " ver " [NEW] " nver
		} else {
			print name " seems ok: " ver
		}
		return;
	}

	if (name == "xulrunner") {
		ver = subst_defines(DEFS["firefox_ver"], DEFS)
		d("package xulrunner, change version to firefox ["ver"]")
	}

	# check that macro substitution succeeded and, if so,
	# process each URL and try to get the current file list
	for (i in src) {
		if (src[i] ~ /%{nil}/) {
			gsub(/\%\{nil\}/, "", src[i])
		}
		if ( src[i] !~ /%{.*}/ && src[i] !~ /%[A-Za-z0-9_]/ ) {
			d("Source: " src[i])
			process_source(i, src[i], name, ver)
		} else {
			print FNAME ":" i ": impossible substitution: " src[i]
		}
	}
}

BEGIN {
	# use perl links extraction by default
	USE_PERL = 1

	# if you want to use DEBUG, run script with "-v DEBUG=1"
	# or uncomment the line below
	# DEBUG = 1

	errno=system("wget --help > /dev/null 2>&1")
	if (errno && errno != 3) {
		print "No wget installed!"
		exit 1
	}
	if (ARGC>=3 && ARGV[2]=="-n") {
		NUMERIC=1
		for (i=3; i<ARGC; i++) ARGV[i-1]=ARGV[i]
		ARGC=ARGC-1
	}
}

FNR==1 {
	if ( ARGIND != 1 ) {
		# clean frameseen for each ARG
		for (i in frameseen) {
			delete frameseen[i]
		}
		frameseen[0] = 1

		process_data(NAME,VER,REL,SRC)
		NAME="" ; VER="" ; REL=""
		for (i in DEFS) delete DEFS[i]
		for (i in SRC) delete SRC[i]
	}
	FNAME=FILENAME
	DEFS["_alt_kernel"]=""
	DEFS["20"]="\\ "
	DEFS["nil"]=""
}

/^[Uu][Rr][Ll]:/&&(URL=="") { URL=subst_defines($2,DEFS) ; DEFS["url"]=URL }
/^[Nn]ame:/&&(NAME=="") { NAME=subst_defines($2,DEFS) ; DEFS["name"]=NAME }
/^[Vv]ersion:/&&(VER=="") { VER=subst_defines($2,DEFS) ; DEFS["version"]=VER }
/^[Rr]elease:/&&(REL=="") { REL=subst_defines($2,DEFS) ; DEFS["release"]=REL }
/^[Ss]ource[0-9]*:/ { if (/(ftp|http|https):\/\//) SRC[FNR]=subst_defines($2,DEFS) }
/%define/ { DEFS[$2]=subst_defines($3,DEFS) }
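
# Example (informal): for a spec containing (made-up values)
#   Name:		foo
#   Version:	1.2
#   Source0:	http://example.org/dist/foo-%{version}.tar.gz
# the rules above set NAME="foo", VER="1.2" and SRC[<line number>] to the
# expanded URL; the END rule (or the start of the next spec file) then hands
# them to process_data().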

END {
	process_data(NAME,VER,REL,SRC)
}