4 # Copyright (C) 2000-2013 PLD-Team <feedback@pld-linux.org>
6 # Sebastian Zagrodzki <zagrodzki@pld-linux.org>
7 # Jacek Konieczny <jajcus@pld-linux.org>
8 # Andrzej Krzysztofowicz <ankry@pld-linux.org>
9 # Jakub Bogusz <qboosh@pld-linux.org>
10 # Elan Ruusamäe <glen@pld-linux.org>
12 # See cvs log pldnotify.awk for list of contributors
15 # - "SourceXDownload" support (use given URLs if present instead of cut-down SourceX URLs)
16 # - "SourceXActiveFTP" support
17 # - support debian/watch http://wiki.debian.org/debian/watch/
20 # to test run this, run:
21 # $ awk -vDEBUG=1 pldnotify.awk < specfile
# Body fragment of the debug helper (its `function d(s)` header line is
# outside this visible extract): writes the message to stderr.  The
# strftime-timestamped variant is kept commented out.
28 # print strftime("%Y-%m-%d %H:%M:%S ") s >> "/dev/stderr"
29 print s >> "/dev/stderr"
# fixedsub(s1, s2, t): return `t` with the first occurrence of the
# literal (non-regexp) string s1 replaced by s2; `ind` is a scratch
# local in AWK's extra-parameter convention.
# NOTE(review): the return statement / closing brace are not visible in
# this extract.
32 function fixedsub(s1,s2,t, ind) {
33 # substitutes fixed strings (not regexps)
34 if (ind = index(t,s1)) {
35 t = substr(t, 1, ind-1) s2 substr(t, ind+length(s1))
# Fragment of ispre(): treats a version component containing
# pre/beta/alpha/rc (either case) as a pre-release marker.  The function
# header and return lines are not visible in this extract.
41 if ((s~"pre")||(s~"PRE")||(s~"beta")||(s~"BETA")||(s~"alpha")||(s~"ALPHA")||(s~"rc")||(s~"RC")) {
# compare_ver(v1, v2): compare two version strings.  Letter/digit
# boundaries are first turned into extra "." separators, then the
# strings are split on "." and compared component by component; numeric
# components are compared by length-then-value, alphabetic ones
# lexically, and pre-release markers (ispre) count as older.  Trailing
# extra components of the longer version are checked for pre-release
# markers too.
# NOTE(review): from call sites it appears to return 1 when v2 is newer
# than v1 -- TODO confirm; the return statements themselves are elided
# from this extract.
49 function compare_ver(v1,v2) {
50 # compares version numbers
51 while (match(v1,/[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
52 v1=(substr(v1,1,RSTART) "." substr(v1,RSTART+RLENGTH-1))
53 while (match(v2,/[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
54 v2=(substr(v2,1,RSTART) "." substr(v2,RSTART+RLENGTH-1))
# Split into components on "." (note: "\." in a dynamic-regexp string is
# interpreted by gawk as "." with a warning).
61 count=split(v1,v1a,"\.")
62 count2=split(v2,v2a,"\.")
64 if (count<count2) mincount=count
67 for (i=1; i<=mincount; i++) {
68 if (v1a[i]=="") v1a[i]=0
69 if (v2a[i]=="") v2a[i]=0
73 if ((v1a[i]~/[0-9]/)&&(v2a[i]~/[0-9]/)) {
# Longer numeric component wins first (so "10" beats "9"), then value.
74 if (length(v2a[i])>length(v1a[i]))
76 else if (v2a[i]>v1a[i])
78 else if (length(v1a[i])>length(v2a[i]))
80 else if (v1a[i]>v2a[i])
82 } else if ((v1a[i]~/[A-Za-z]/)&&(v2a[i]~/[A-Za-z]/)) {
85 else if (v1a[i]>v2a[i])
87 } else if (ispre(v1a[i]) == 1)
# One version has extra trailing components: a pre-release marker there
# makes that version the older one.
92 if ((count2==mincount)&&(count!=count2)) {
93 for (i=count2+1; i<=count; i++)
94 if (ispre(v1a[i]) == 1)
97 } else if (count!=count2) {
98 for (i=count+1; i<=count2; i++)
99 if (ispre(v2a[i]) == 1)
# compare_ver_dec(v1, v2): like compare_ver(), but numeric components
# are compared as decimal fractions (each component prefixed with "."
# and compared numerically), so e.g. ".9" > ".10" -- used where upstream
# versions behave like decimals rather than integers.
# NOTE(review): return statements are elided from this extract; call
# sites suggest 1 means v2 is newer -- TODO confirm.
106 function compare_ver_dec(v1,v2) {
107 # compares version numbers as decimal floats
108 while (match(v1,/[0-9][a-zA-Z]/))
109 v1=(substr(v1,1,RSTART) "." substr(v1,RSTART+RLENGTH-1))
110 while (match(v2,/[0-9][a-zA-Z]/))
111 v2=(substr(v2,1,RSTART) "." substr(v2,RSTART+RLENGTH-1))
116 count=split(v1,v1a,"\.")
117 count2=split(v2,v2a,"\.")
119 if (count<count2) mincount=count
122 for (i=1; i<=mincount; i++) {
123 if (v1a[i]=="") v1a[i]=0
124 if (v2a[i]=="") v2a[i]=0
126 d("v1[i] == " v1a[i])
127 d("v2[i] == " v2a[i])
128 if ((v1a[i]~/[0-9]/)&&(v2a[i]~/[0-9]/)) {
# Compare as ".NNN" decimal fractions; fall back to length/value order.
130 if (0+("." v2a[i])>0+("." v1a[i]))
132 else if (0+("." v1a[i])>0+("." v2a[i]))
135 if (length(v2a[i])>length(v1a[i]))
137 else if (v2a[i]>v1a[i])
139 else if (length(v1a[i])>length(v2a[i]))
141 else if (v1a[i]>v2a[i])
144 } else if ((v1a[i]~/[A-Za-z]/)&&(v2a[i]~/[A-Za-z]/)) {
147 else if (v1a[i]>v2a[i])
149 } else if (ispre(v1a[i]) == 1)
# Extra trailing components: a pre-release marker makes that side older.
154 if ((count2==mincount)&&(count!=count2)) {
155 for (i=count2+1; i<=count; i++)
156 if (ispre(v1a[i]) == 1)
159 } else if (count!=count2) {
160 for (i=count+1; i<=count2; i++)
161 if (ispre(v2a[i]) == 1)
# link_seen(link): linear scan over the global `frameseen` table;
# reports whether `link` was already processed (used to avoid loops when
# following frames/subpages).  The return statements are elided from
# this extract.
168 function link_seen(link) {
169 for (seenlink in frameseen) {
170 if (seenlink == link) {
171 d("Link: [" link "] seen already, skipping...")
# mktemp(): create a unique temporary file via the mktemp(1) command and
# (presumably) return its name; _cmd/_tmpfile are scratch locals.
# NOTE(review): the command handle is not close()d in the visible lines
# -- confirm the elided part closes it, otherwise handles leak.
179 function mktemp( _cmd, _tmpfile) {
180 _cmd = "mktemp /tmp/XXXXXX"
181 _cmd | getline _tmpfile
186 # fix link to artificial one that will be recognized rest of this script
# postfix_link(url, link): rewrite special-case links into a form the
# rest of the script can version-parse.  Currently only github
# /tarball/ links: the path prefix is stripped and ".tar.gz" appended.
# `oldlink` is a scratch local; the return statement is elided here.
187 function postfix_link(url, link, oldlink) {
189 if ((url ~/^(http|https):\/\/github.com\//) && (link ~ /.*\/tarball\//)) {
190 gsub(".*\/tarball\/", "", link)
191 link = link ".tar.gz"
193 if (oldlink != link) {
194 d("POST FIXED URL [ " oldlink " ] to [ " link " ]")
199 # use perl HTML::TreeBuilder module to extract links from html
# it returns "TAGNAME LINK" lines on stdout, which is straightforward
# to parse in awk.  The whole Perl program is one multi-line shell
# string; de-duplication happens in the %links hash keyed by link.
# NOTE(review): only <a> and <iframe> links are extracted here, while
# the regexp fallback in get_links() also handles <frame> -- confirm
# that is intentional.
201 function extract_links_cmd(tmpfile) {
202 return "perl -MHTML::TreeBuilder -e ' \
203 my $content = join q//, <>; \
204 my $root = new HTML::TreeBuilder; \
205 $root->parse($content); \
208 for (@{$root->extract_links(qw(a iframe))}) { \
209 my($link, $element, $attr, $tag) = @$_; \
210 $links{$link} = $tag; \
213 while (my($link, $tag) = each %links) { \
214 print $tag, q/ /, $link, $/; \
219 # get all <A HREF=..> tags from specified URL
# get_links(url, filename): fetch `url` with wget and return a
# space-separated list of links found in the page.  Known hosting sites
# (sourceforge, googlecode, pecl.php.net, mysql, launchpad, github,
# cgit, aquamaniac.de) first get their URLs rewritten so that a usable
# file-listing page is fetched instead of the raw download URL.
# On wget failure the return value starts with "WGET ERROR:" -- callers
# match on "ERROR: " to detect that.
# NOTE(review): this is a sampled extract; several lines (branch
# closings, getline error handling, recursion guards) are not visible.
220 function get_links(url,filename, errno,link,oneline,retval,odp,wholeodp,lowerodp,tmpfile,cmd) {
225 tmpfileerr = mktemp()
# --- URL munging for known hosting sites ---------------------------
227 if (url ~ /^http:\/\/(download|dl)\.(sf|sourceforge)\.net\//) {
228 # http://downloads.sourceforge.net/project/mediainfo/source/mediainfo/
229 gsub("^http://(download|dl)\.(sf|sourceforge)\.net/", "", url)
232 url = "http://sourceforge.net/projects/" url "/files/"
233 d("sf url, mungled url to: " url)
236 if (url ~ /^http:\/\/(.*)\.googlecode\.com\/files\//) {
237 gsub("^http://", "", url)
238 gsub("\..*", "", url)
239 url = "http://code.google.com/p/" url "/downloads/list"
240 d("googlecode url, mungled url to: " url)
243 if (url ~ /^http:\/\/pecl.php.net\/get\//) {
244 gsub("-.*", "", filename)
245 url = "http://pecl.php.net/package/" filename
246 d("pecl.php.net url, mungled url to: " url)
249 if (url ~ /^(http|ftp):\/\/mysql.*\/Downloads\/MySQL-5.1\//) {
250 url = "http://dev.mysql.com/downloads/mysql/5.1.html#source"
251 d("mysql 5.1 url, mungled url to: " url)
254 if (url ~/^(http|https):\/\/launchpad\.net\/(.*)\//) {
255 gsub("^(http|https):\/\/launchpad\.net\/", "", url)
256 gsub("\/.*/", "", url)
257 url = "https://code.launchpad.net/" url "/+download"
258 d("main launchpad url, mungled url to: " url)
261 if (url ~/^(http|https):\/\/edge\.launchpad\.net\/(.*)\//) {
262 gsub("^(http|https):\/\/edge\.launchpad\.net\/", "", url)
263 gsub("\/.*/", "", url)
264 url = "https://edge.launchpad.net/" url "/+download"
265 d("edge launchpad url, mungled url to: " url)
268 if (url ~/^(http|https):\/\/github.com\/.*\/(.*)\/tarball\//) {
269 gsub("\/tarball\/.*", "/downloads", url)
270 d("github tarball url, mungled url to: " url)
273 if (url ~/^(http|https):\/\/cgit\..*\/(.*)\/snapshot\//) {
274 gsub("\/snapshot\/.*", "/", url)
275 d("cgit snapshot tarball url, mungled url to: " url)
278 if (url ~/^(http|https):\/\/www2\.aquamaniac\.de\/sites\/download\//) {
279 url = "http://www2.aquamaniac.de/sites/download/packages.php"
280 d("aquamaniac.de tarball url, mungled url to: " url)
# --- fetch the page to tmpfile, stderr to tmpfileerr ---------------
283 d("Retrieving: " url)
284 user_agent = "Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100129 PLD/3.0 (Th) Iceweasel/3.6"
285 cmd = "wget -t 2 -T 45 --user-agent \"" user_agent "\" -nv -O - \"" url "\" --passive-ftp --no-check-certificate > " tmpfile " 2> " tmpfileerr
# Failure path: collect wget's stderr into the returned error string.
291 d("Reading failure response...")
293 while (getline oneline < tmpfileerr)
294 wholeerr = (wholeerr " " oneline)
295 d("Error Response: " wholeerr)
297 system("rm -f " tmpfile)
298 system("rm -f " tmpfileerr)
299 retval = ("WGET ERROR: " errno ": " wholeerr)
302 system("rm -f " tmpfileerr)
# Base directory of the URL, used to absolutize relative frame links.
305 sub(/[^\/]+$/, "", urldir)
# Preferred path: parse the HTML with the perl helper; iframes are
# followed recursively, plain links accumulated into retval.
308 cmd = extract_links_cmd(tmpfile)
309 while (cmd | getline) {
311 link = substr($0, length(tag) + 2)
313 if (tag == "iframe") {
320 if (link_seen(link)) {
323 retval = (retval " " get_links(link))
326 if (link_seen(link)) {
330 retval = (retval " " link)
334 system("rm -f " tmpfile)
336 d("Returning: [" retval "]")
# Fallback path: slurp the page and scan <a>/<frame> tags by regexp.
341 d("Reading success response...")
342 while (getline oneline < tmpfile) {
343 wholeodp = (wholeodp " " oneline)
344 # d("Response: " wholeodp)
346 d("Reponse read done...")
347 system("rm -f " tmpfile)
349 while (match(wholeodp, /<([aA]|[fF][rR][aA][mM][eE])[ \t][^>]*>/) > 0) {
350 d("Processing links...")
351 odp = substr(wholeodp,RSTART,RLENGTH);
352 wholeodp = substr(wholeodp,RSTART+RLENGTH);
354 lowerodp = tolower(odp);
355 if (lowerodp ~ /<frame[ \t]/) {
# <frame src=...>: absolutize relative src and recurse into it.
356 sub(/[sS][rR][cC]=[ \t]*/, "src=", odp);
357 match(odp, /src="[^"]+"/)
358 newurl = substr(odp, RSTART+5, RLENGTH-6)
360 if (newurl !~ /\//) {
361 newurl=(urldir newurl)
362 d("Frame->: " newurl)
365 if (link_seen(newurl)) {
370 retval = (retval " " get_links(newurl))
371 } else if (lowerodp ~ /href=[ \t]*"[^"]*"/) {
# href="..." (double-quoted)
372 sub(/[hH][rR][eE][fF]=[ \t]*"/,"href=\"",odp)
373 match(odp,/href="[^"]*"/)
374 link=substr(odp,RSTART,RLENGTH)
375 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
376 link=substr(link,7,length(link)-7)
377 link=postfix_link(url, link)
379 if (link_seen(link)) {
384 # link ends with at least 2 digit version
386 if (link ~ /^.*\/[v]*[0-9\.]+[0-9]\/$/)
387 mlink = get_links(link)
389 retval = (retval " " link " " mlink)
390 d("href(\"\"): " link)
391 } else if (lowerodp ~ /href=[ \t]*'[^']*'/) {
# href='...' (single-quoted)
392 sub(/[hH][rR][eE][fF]=[ \t]*'/,"href='",odp)
393 match(odp,/href='[^']*'/)
394 link=substr(odp,RSTART,RLENGTH)
395 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
396 link=substr(link,7,length(link)-7)
397 link=postfix_link(url, link)
399 if (link_seen(link)) {
404 retval = (retval " " link)
406 } else if (lowerodp ~ /href=[ \t]*[^ \t>]*/) {
# unquoted href=...
407 sub(/[hH][rR][eE][fF]=[ \t]*/,"href=",odp)
408 match(odp,/href=[^ \t>]*/)
409 link=substr(odp,RSTART,RLENGTH)
410 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
411 link=substr(link,6,length(link)-5)
413 if (link_seen(link)) {
418 retval = (retval " " link)
421 # <a ...> but not href - skip
422 d("skipping <a > without href: " odp)
426 d("Returning: [" retval "]")
# subst_defines(var, defs): repeatedly expand %{name} and %name RPM
# macro references in `var` from the `defs` table until none remain.
# NOTE(review): the loop exit for *unknown* macros and the return
# statement are elided from this extract -- an unresolvable macro would
# otherwise loop forever here.
430 function subst_defines(var,defs) {
431 # substitute all possible RPM macros
432 while ((var ~ /%{.*}/) || (var ~ /%[A-Za-z0-9_]+/)) {
435 gsub("%{" j "}", defs[j], var)
436 gsub("%" j , defs[j], var)
# find_mirror(url): read the local "mirrors" file ("|"-separated
# fields); when `url` starts with a known origin prefix, return it
# rewritten to point at the mirror.  getline returning -1 (read error,
# e.g. no mirrors file) is truthy, hence the explicit succ==-1 bailout
# returning the url unchanged.
# NOTE(review): the fields/origin/mirror assignments and the no-match
# fallthrough are elided from this extract.
450 function find_mirror(url) {
452 while (succ = (getline line < "mirrors")) {
453 if (succ==-1) { return url }
454 nf=split(line,fields,"|")
459 prefix=substr(url,1,length(origin))
461 d("Mirror found at " mname)
463 return mirror substr(url,length(origin)+1)
471 # fetches file list, and compares version numbers
# process_source(number, lurl, name, version): for SourceN line
# `number` of package `name`, derive the listing directory and a
# filename pattern from `lurl`, fetch the directory via get_links()
# (through find_mirror()), and compare every matching filename's
# embedded version against `version`, printing "[OLD] ... [NEW] ..." or
# "seems ok".
# NOTE(review): heavily sampled -- the acc/host/dir/filename parsing,
# several closings, and the oldversion bookkeeping are not visible.
472 function process_source(number, lurl, name, version) {
473 d("Processing " lurl)
475 if (index(lurl, version) == 0) {
476 d("There is no version number ["version"] in ["lurl"]")
482 gsub("[^/]*$",":&",lurl)
489 if (index(dir,version)) {
490 # directory name as version maching mode:
491 # if /something/version/name-version.tarball then check
492 # in /something/ looking for newer directory
493 dir=substr(dir,1,index(dir,version)-1)
495 sub("(\.tar\.(bz|bz2|gz|lzma|xz)|zip)$","",filename)
498 d("Will check a directory: " dir)
499 d("and a file: " filename)
# Build a regexp from the filename: escape +/. literally and replace
# the version substring with a permissive [A-Za-z0-9.]+ pattern.
502 gsub("[+]","\\+",filenameexp)
503 sub(version,"[A-Za-z0-9.]+",filenameexp)
504 gsub("[.]","\\.",filenameexp)
505 d("Expression: " filenameexp)
506 match(filename,version)
507 prever=substr(filename,1,RSTART-1)
508 postver=substr(filename,RSTART+RLENGTH)
509 d("Before number: " prever)
510 d("and after: " postver)
511 newurl=find_mirror(acc "://" host dir)
512 #print acc "://" host dir
513 #newurl=url[1]"://"url[2]url[3]url[4]
514 #newurl=acc "://" host dir filename
515 d("Looking at " newurl)
520 odp = get_links(newurl, filename)
521 if( odp ~ "ERROR: ") {
522 print name "(" number ") " odp
524 d("WebPage downloaded")
526 for (nr=1; nr<=c; nr++) {
529 d("Found link: " addr)
531 # github has very different tarball links that clash with this safe check
532 if (!(newurl ~/^(http|https):\/\/github.com\/.*\/tarball/)) {
533 if (addr ~ "[-_.0-9A-Za-z~]" filenameexp) {
538 if (addr ~ filenameexp) {
539 match(addr,filenameexp)
540 newfilename=substr(addr,RSTART,RLENGTH)
541 d("Hypothetical new: " newfilename)
# Strip the constant prefix/suffix to leave just the version text.
542 newfilename=fixedsub(prever,"",newfilename)
543 newfilename=fixedsub(postver,"",newfilename)
544 d("Version: " newfilename)
545 if (newfilename ~ /\.(asc|sig|pkg|bin|binary|built)$/) continue
546 # strip ending (happens when in directiory name as version matching mode)
547 sub("(\.tar\.(bz|bz2|gz|lzma|xz)|zip)$","",newfilename)
549 if ( compare_ver_dec(version, newfilename)==1 ) {
550 d("Yes, there is new one")
554 } else if ( compare_ver(version, newfilename)==1 ) {
555 d("Yes, there is new one")
562 print name "(" number ") seems ok: " oldversion
564 print name "(" number ") [OLD] " oldversion " [NEW] " version
# rss_upgrade(name, ver, url, regex): generic RSS-based version check:
# fetch the feed with wget and sed-extract the first <title> captured
# by `regex` (its group 1 must be the version).  `cmd`/`nver` are
# scratch locals; the getline that reads the result is elided here.
568 function rss_upgrade(name, ver, url, regex, cmd, nver) {
569 regex = "s/.*<title>" regex "<\/title>.*/\\1/p"
570 cmd = "wget -t 2 -T 45 -q -O - " url " | sed -nre '" regex "' | head -n1"
572 d("rss_upgrade_cmd: " cmd)
579 # check for ZF upgrade from rss
# zf_upgrade(name, ver): Zend Framework check via the devzone RSS feed;
# delegates to rss_upgrade() (closing of the call is elided here).
580 function zf_upgrade(name, ver) {
581 return rss_upgrade(name, ver, \
582 "http://devzone.zend.com/tag/Zend_Framework_Management/format/rss2.0", \
583 "Zend Framework ([^\\s]+) Released" \
# hudson_upgrade(name, ver): Hudson CI check via its project news RSS;
# delegates to rss_upgrade() (closing of the call is elided here).
587 function hudson_upgrade(name, ver) {
588 return rss_upgrade(name, ver, \
589 "https://hudson.dev.java.net/servlets/ProjectRSS?type=news", \
590 "Hudson ([0-9.]+) released" \
594 # upgrade check for pear package using PEAR CLI
# pear_upgrade(name, ver): strip the "php-pear-" prefix and ask
# `pear remote-info` for the latest upstream version; the comparison
# and return are elided from this extract.
# NOTE(review): the `pearcmd` pipe is not close()d in the visible lines.
595 function pear_upgrade(name, ver, pname, pearcmd, nver) {
597 sub(/^php-pear-/, "", pname);
599 pearcmd = "pear remote-info " pname " | awk '/^Latest/{print $NF}'"
600 d("pearcmd: " pearcmd)
601 pearcmd | getline nver
# vim_upgrade(name, ver): derive the major version (first 3 chars of
# ver) and read the last patch entry from vim.org's MD5SUMS listing to
# find the newest patchlevel; comparison/return elided here.
# NOTE(review): substr(ver, 0, 3) -- awk strings are 1-indexed, start 0
# still yields 3 chars in gawk but is unconventional; confirm intent.
607 function vim_upgrade(name, ver, mver, nver, vimcmd) {
608 # %patchset_source -f ftp://ftp.vim.org/pub/editors/vim/patches/7.2/7.2.%03g 1 %{patchlevel}
609 mver = substr(ver, 0, 3)
610 vimcmd = "wget -q -O - ftp://ftp.vim.org/pub/editors/vim/patches/" mver "/MD5SUMS|grep -vF .gz|tail -n1|awk '{print $2}'"
612 vimcmd | getline nver
# nodejs_upgrade(name, ver): query npm's dist-tags.latest for the
# package -- preferring the spec's %define pkg (DEFS["pkg"]) over the
# RPM name; the getline/return part is elided from this extract.
618 function nodejs_upgrade(name, ver, cmd, nver) {
619 d("NODEJS " name " (as " DEFS["pkg"] ") " ver);
621 cmd = "npm info " DEFS["pkg"] " dist-tags.latest"
623 cmd = "npm info " name " dist-tags.latest"
# chrome_upgrade(name, ver): read Google's yum repodata and extract the
# current google-chrome-<state> version with a perl one-liner (state
# comes from the spec's %define, via DEFS["state"]); getline/return
# elided here.
# NOTE(review): "[\d.]" inside the double-quoted awk string reaches
# perl as an escaped d -- confirm the character class survives quoting.
631 function chrome_upgrade(name, ver, cmd, sourceurl) {
632 sourceurl = "http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/primary.xml.gz"
633 cmd = "curl -s " sourceurl " | zcat | perl -ne 'm{<name>google-chrome-" DEFS["state"] "</name>} and m{<version .*ver=.([\d.]+)} and print $1'"
# process_data(name, ver, rel, src): per-package dispatcher.  Packages
# with dedicated checkers (pear, ZendFramework, hudson, vim,
# google-chrome, nodejs-*) get their new version via those helpers and
# the result is printed here; everything else falls through (in elided
# lines) to the SourceN URL scan.  xulrunner's version is taken from
# the firefox_ver macro.  Returns/closings are elided in this extract.
641 function process_data(name, ver, rel, src, nver) {
642 if (name ~ /^php-pear-/) {
643 nver = pear_upgrade(name, ver);
644 } else if (name == "ZendFramework") {
645 nver = zf_upgrade(name, ver);
646 } else if (name == "hudson") {
647 nver = hudson_upgrade(name, ver);
648 } else if (name == "vim") {
649 nver = vim_upgrade(name, ver);
650 } else if (name == "google-chrome") {
651 nver = chrome_upgrade(name, ver);
652 } else if (name ~ "^nodejs-") {
653 nver = nodejs_upgrade(name, ver);
657 if (compare_ver(ver, nver)) {
658 print name " [OLD] " ver " [NEW] " nver
660 print name " seems ok: " ver
665 if (name == "xulrunner") {
666 ver = subst_defines(DEFS["firefox_ver"], DEFS)
667 d("package xulrunner, change version to firefox ["ver"]")
670 # this function checks if substitutions were valid, and if true:
671 # processes each URL and tries to get current file list
# Fragment of the per-spec Source loop (its `function` line is not
# visible in this extract): drop %{nil}, then either scan a fully
# macro-expanded SourceN URL via process_source(), or report the line
# as an impossible substitution.
673 if (src[i] ~ /%{nil}/) {
674 gsub(/\%\{nil\}/, "", src[i])
676 if ( src[i] !~ /%{.*}/ && src[i] !~ /%[A-Za-z0-9_]/ ) {
678 process_source(i, src[i], name, ver)
680 print FNAME ":" i ": impossible substitution: " src[i]
686 # if you want to use DEBUG, run script with "-v DEBUG=1"
687 # or uncomment the line below
# BEGIN-block fragment (opening/closing braces elided in this extract):
# verify wget is available (exit status 3 from `wget --help` is
# tolerated), handle an optional "-n" flag by shifting ARGV, and reset
# the per-spec parser state (frameseen, NAME/VER/REL, DEFS, SRC).
690 errno=system("wget --help > /dev/null 2>&1")
691 if (errno && errno != 3) {
692 print "No wget installed!"
695 if (ARGC>=3 && ARGV[2]=="-n") {
697 for (i=3; i<ARGC; i++) ARGV[i-1]=ARGV[i]
704 # clean frameseen for each ARG
705 for (i in frameseen) {
710 process_data(NAME,VER,REL,SRC)
711 NAME="" ; VER="" ; REL=""
712 for (i in DEFS) delete DEFS[i]
713 for (i in SRC) delete SRC[i]
716 DEFS["_alt_kernel"]=""
# Per-line spec parsing rules: capture the first URL:/Name:/Version:/
# Release: tag seen (the ==\"\" guards keep only the first occurrence),
# remember remote SourceN URLs keyed by their line number (FNR), and
# record %define macros -- all values macro-expanded via
# subst_defines() against what has been collected so far.
721 /^[Uu][Rr][Ll]:/&&(URL=="") { URL=subst_defines($2,DEFS) ; DEFS["url"]=URL }
722 /^[Nn]ame:/&&(NAME=="") { NAME=subst_defines($2,DEFS) ; DEFS["name"]=NAME }
723 /^[Vv]ersion:/&&(VER=="") { VER=subst_defines($2,DEFS) ; DEFS["version"]=VER }
724 /^[Rr]elease:/&&(REL=="") { REL=subst_defines($2,DEFS) ; DEFS["release"]=REL }
725 /^[Ss]ource[0-9]*:/ { if (/(ftp|http|https):\/\//) SRC[FNR]=subst_defines($2,DEFS) }
726 /%define/ { DEFS[$2]=subst_defines($3,DEFS) }
# Trailing fragment (enclosing block header elided -- presumably END or
# an end-of-file rule): process the last spec file's collected data.
729 process_data(NAME,VER,REL,SRC)