3 # Copyright (C) 2000-2013 PLD-Team <feedback@pld-linux.org>
5 # Sebastian Zagrodzki <zagrodzki@pld-linux.org>
6 # Jacek Konieczny <jajcus@pld-linux.org>
7 # Andrzej Krzysztofowicz <ankry@pld-linux.org>
8 # Jakub Bogusz <qboosh@pld-linux.org>
9 # Elan Ruusamäe <glen@pld-linux.org>
11 # See the cvs log of pldnotify.awk for the list of contributors
14 # - "SourceXDownload" support (use given URLs if present instead of cut-down SourceX URLs)
15 # - "SourceXActiveFTP" support
16 # - support debian/watch http://wiki.debian.org/debian/watch/
19 # to test this script, run:
20 # $ awk -vDEBUG=1 -f pldnotify.awk < specfile
22 # To get the most out of it, you need to have the following tools installed:
23 # - perl, sed, wget, coreutils, util-linux
24 # - perl-HTML-Tree (HTML::TreeBuilder module) for better link parsing (-vUSE_PERL=0 to disable)
25 # - php-pear-PEAR for php-pear package updates
26 # - npm for nodejs packages
28 # Additionally, a "mirrors" file in the current dir controls which local mirrors you prefer
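# A hedged sketch of the "mirrors" file format (the field order is an assumption,
# inferred from find_mirror() below, which splits each line on "|"):
#   <origin-url-prefix>|<mirror-url-prefix>|<mirror-name>
# e.g. a hypothetical entry:
#   ftp://ftp.gnome.org/pub/GNOME/|ftp://ftp.example.org/mirror/GNOME/|example GNOME mirror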
35 # print strftime("%Y-%m-%d %H:%M:%S ") s >> "/dev/stderr"
36 print s >> "/dev/stderr"
39 function fixedsub(s1,s2,t, ind) {
40 # substitutes fixed strings (not regexps)
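# illustrative call (follows directly from the index()/substr() logic below):
#   fixedsub("1.2", "X", "foo-1.2-1.2.tar.gz") -> "foo-X-1.2.tar.gz"
# only the first occurrence is replaced and "." is not a regexp metacharacter here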
41 if (ind = index(t,s1)) {
42 t = substr(t, 1, ind-1) s2 substr(t, ind+length(s1))
48 if ((s~"pre")||(s~"PRE")||(s~"beta")||(s~"BETA")||(s~"alpha")||(s~"ALPHA")||(s~"rc")||(s~"RC")) {
56 function compare_ver(v1,v2) {
57 # compares version numbers
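# illustrative calls (expected results inferred from how the callers below treat the
# return value: 1 means v2 looks newer than v1, 0 means it does not):
#   compare_ver("1.2.3", "1.2.4")  -> 1
#   compare_ver("1.2.9", "1.2.10") -> 1   (longer numeric component wins)
#   compare_ver("1.2", "1.2rc1")   -> 0   (rc/pre/beta/alpha count as pre-releases)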
58 while (match(v1,/[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
59 v1=(substr(v1,1,RSTART) "." substr(v1,RSTART+RLENGTH-1))
60 while (match(v2,/[a-zA-Z][0-9]|[0-9][a-zA-Z]/))
61 v2=(substr(v2,1,RSTART) "." substr(v2,RSTART+RLENGTH-1))
68 count=split(v1,v1a,"\\.")
69 count2=split(v2,v2a,"\\.")
71 if (count<count2) mincount=count
74 for (i=1; i<=mincount; i++) {
75 if (v1a[i]=="") v1a[i]=0
76 if (v2a[i]=="") v2a[i]=0
80 if ((v1a[i]~/[0-9]/)&&(v2a[i]~/[0-9]/)) {
81 if (length(v2a[i])>length(v1a[i]))
83 else if (v2a[i]>v1a[i])
85 else if (length(v1a[i])>length(v2a[i]))
87 else if (v1a[i]>v2a[i])
89 } else if ((v1a[i]~/[A-Za-z]/)&&(v2a[i]~/[A-Za-z]/)) {
92 else if (v1a[i]>v2a[i])
94 } else if (ispre(v1a[i]) == 1)
99 if ((count2==mincount)&&(count!=count2)) {
100 for (i=count2+1; i<=count; i++)
101 if (ispre(v1a[i]) == 1)
104 } else if (count!=count2) {
105 for (i=count+1; i<=count2; i++)
106 if (ispre(v2a[i]) == 1)
113 function compare_ver_dec(v1,v2) {
114 # compares version numbers as decimal floats
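# illustrative contrast with compare_ver() above (same return convention assumed;
# here the fractional component is compared as a decimal fraction):
#   compare_ver_dec("0.9", "0.10") -> 0   (0.10 < 0.9 as decimals)
#   compare_ver("0.9", "0.10")     -> 1   (10 > 9 as integers)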
115 while (match(v1,/[0-9][a-zA-Z]/))
116 v1=(substr(v1,1,RSTART) "." substr(v1,RSTART+RLENGTH-1))
117 while (match(v2,/[0-9][a-zA-Z]/))
118 v2=(substr(v2,1,RSTART) "." substr(v2,RSTART+RLENGTH-1))
123 count=split(v1,v1a,"\\.")
124 count2=split(v2,v2a,"\\.")
126 if (count<count2) mincount=count
129 for (i=1; i<=mincount; i++) {
130 if (v1a[i]=="") v1a[i]=0
131 if (v2a[i]=="") v2a[i]=0
133 d("v1[i] == " v1a[i])
134 d("v2[i] == " v2a[i])
135 if ((v1a[i]~/[0-9]/)&&(v2a[i]~/[0-9]/)) {
137 if (0+("." v2a[i])>0+("." v1a[i]))
139 else if (0+("." v1a[i])>0+("." v2a[i]))
142 if (length(v2a[i])>length(v1a[i]))
144 else if (v2a[i]>v1a[i])
146 else if (length(v1a[i])>length(v2a[i]))
148 else if (v1a[i]>v2a[i])
151 } else if ((v1a[i]~/[A-Za-z]/)&&(v2a[i]~/[A-Za-z]/)) {
154 else if (v1a[i]>v2a[i])
156 } else if (ispre(v1a[i]) == 1)
161 if ((count2==mincount)&&(count!=count2)) {
162 for (i=count2+1; i<=count; i++)
163 if (ispre(v1a[i]) == 1)
166 } else if (count!=count2) {
167 for (i=count+1; i<=count2; i++)
168 if (ispre(v2a[i]) == 1)
175 function link_seen(link) {
176 for (seenlink in frameseen) {
177 if (seenlink == link) {
178 d("Link: [" link "] seen already, skipping...")
186 function mktemp( _cmd, _tmpfile) {
187 _cmd = "mktemp /tmp/XXXXXX"
188 _cmd | getline _tmpfile
193 # fix link to an artificial one that will be recognized by the rest of this script
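# illustrative example (hypothetical github link): on a page under
# https://github.com/user/proj, a href like ".../tarball/v1.2.3" is rewritten
# to "v1.2.3.tar.gz", so the usual filename/version matching can handle it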
194 function postfix_link(url, link, oldlink) {
196 if ((url ~/^(http|https):\/\/github.com\//) && (link ~ /.*\/tarball\//)) {
197 gsub(".*\/tarball\/", "", link)
198 link = link ".tar.gz"
200 if (oldlink != link) {
201 d("POST FIXED URL [ " oldlink " ] to [ " link " ]")
206 # use the perl HTML::TreeBuilder module to extract links from html
207 # it prints "TAGNAME LINK" lines, which are straightforward to parse in awk
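# example output (assumed shape - tag name, a space, then the link, one per line):
#   a http://example.org/download/foo-1.2.tar.gz
#   iframe http://example.org/files/index.html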
208 function extract_links_cmd(tmpfile) {
209 return "perl -MHTML::TreeBuilder -e ' \
210 my $content = join q//, <>; \
211 my $root = new HTML::TreeBuilder; \
212 $root->parse($content); \
215 for (@{$root->extract_links(qw(a iframe))}) { \
216 my($link, $element, $attr, $tag) = @$_; \
217 $links{$link} = $tag; \
220 while (my($link, $tag) = each %links) { \
221 print $tag, q/ /, $link, $/; \
226 # get all <A HREF=..> tags from the specified URL
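# returns a single space-separated string of links (as the callers below assume), e.g.
#   "http://example.org/foo-1.2.tar.gz http://example.org/foo-1.3.tar.gz"
# or a string containing "WGET ERROR: <errno>: <message>" when the download failed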
227 function get_links(url,filename, errno,link,oneline,retval,odp,wholeodp,lowerodp,tmpfile,cmd) {
232 tmpfileerr = mktemp()
234 if (url ~ /^http:\/\/(download|dl)\.(sf|sourceforge)\.net\//) {
235 # http://downloads.sourceforge.net/project/mediainfo/source/mediainfo/
236 gsub("^http://(download|dl)\.(sf|sourceforge)\.net/", "", url)
239 url = "http://sourceforge.net/projects/" url "/files/"
240 d("sf url, mungled url to: " url)
243 if (url ~ /^http:\/\/(.*)\.googlecode\.com\/files\//) {
244 gsub("^http://", "", url)
245 gsub("\..*", "", url)
246 url = "http://code.google.com/p/" url "/downloads/list"
247 d("googlecode url, mungled url to: " url)
250 if (url ~ /^http:\/\/pecl.php.net\/get\//) {
251 gsub("-.*", "", filename)
252 url = "http://pecl.php.net/package/" filename
253 d("pecl.php.net url, mungled url to: " url)
256 if (url ~ /^(http|ftp):\/\/mysql.*\/Downloads\/MySQL-5.1\//) {
257 url = "http://dev.mysql.com/downloads/mysql/5.1.html#source"
258 d("mysql 5.1 url, mungled url to: " url)
261 if (url ~/^(http|https):\/\/launchpad\.net\/(.*)\//) {
262 gsub("^(http|https):\/\/launchpad\.net\/", "", url)
263 gsub("\/.*/", "", url)
264 url = "https://code.launchpad.net/" url "/+download"
265 d("main launchpad url, mungled url to: " url)
268 if (url ~/^(http|https):\/\/edge\.launchpad\.net\/(.*)\//) {
269 gsub("^(http|https):\/\/edge\.launchpad\.net\/", "", url)
270 gsub("\/.*/", "", url)
271 url = "https://edge.launchpad.net/" url "/+download"
272 d("edge launchpad url, mungled url to: " url)
275 if (url ~/^(http|https):\/\/github.com\/.*\/(.*)\/tarball\//) {
276 gsub("\/tarball\/.*", "/downloads", url)
277 d("github tarball url, mungled url to: " url)
280 if (url ~/^(http|https):\/\/cgit\..*\/(.*)\/snapshot\//) {
281 gsub("\/snapshot\/.*", "/", url)
282 d("cgit snapshot tarball url, mungled url to: " url)
285 if (url ~/^(http|https):\/\/www2\.aquamaniac\.de\/sites\/download\//) {
286 url = "http://www2.aquamaniac.de/sites/download/packages.php"
287 d("aquamaniac.de tarball url, mungled url to: " url)
290 d("Retrieving: " url)
291 user_agent = "Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100129 PLD/3.0 (Th) Iceweasel/3.6"
292 cmd = "wget -t 2 -T 45 --user-agent \"" user_agent "\" -nv -O - \"" url "\" --passive-ftp --no-check-certificate > " tmpfile " 2> " tmpfileerr
298 d("Reading failure response...")
300 while (getline oneline < tmpfileerr)
301 wholeerr = (wholeerr " " oneline)
302 d("Error Response: " wholeerr)
304 system("rm -f " tmpfile)
305 system("rm -f " tmpfileerr)
306 retval = ("WGET ERROR: " errno ": " wholeerr)
309 system("rm -f " tmpfileerr)
312 sub(/[^\/]+$/, "", urldir)
315 cmd = extract_links_cmd(tmpfile)
316 while (cmd | getline) {
318 link = substr($0, length(tag) + 2)
320 if (tag == "iframe") {
327 if (link_seen(link)) {
330 retval = (retval " " get_links(link))
333 if (link_seen(link)) {
337 retval = (retval " " link)
341 system("rm -f " tmpfile)
343 d("Returning: [" retval "]")
348 d("Reading success response...")
349 while (getline oneline < tmpfile) {
350 wholeodp = (wholeodp " " oneline)
351 # d("Response: " wholeodp)
353 d("Reponse read done...")
354 system("rm -f " tmpfile)
356 while (match(wholeodp, /<([aA]|[fF][rR][aA][mM][eE])[ \t][^>]*>/) > 0) {
357 d("Processing links...")
358 odp = substr(wholeodp,RSTART,RLENGTH);
359 wholeodp = substr(wholeodp,RSTART+RLENGTH);
361 lowerodp = tolower(odp);
362 if (lowerodp ~ /<frame[ \t]/) {
363 sub(/[sS][rR][cC]=[ \t]*/, "src=", odp);
364 match(odp, /src="[^"]+"/)
365 newurl = substr(odp, RSTART+5, RLENGTH-6)
367 if (newurl !~ /\//) {
368 newurl=(urldir newurl)
369 d("Frame->: " newurl)
372 if (link_seen(newurl)) {
377 retval = (retval " " get_links(newurl))
378 } else if (lowerodp ~ /href=[ \t]*"[^"]*"/) {
379 sub(/[hH][rR][eE][fF]=[ \t]*"/,"href=\"",odp)
380 match(odp,/href="[^"]*"/)
381 link=substr(odp,RSTART,RLENGTH)
382 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
383 link=substr(link,7,length(link)-7)
384 link=postfix_link(url, link)
386 if (link_seen(link)) {
391 # link ends with a version-like directory (at least 2 digits) - recurse into it
393 if (link ~ /^.*\/[v]*[0-9\.]+[0-9]\/$/)
394 mlink = get_links(link)
396 retval = (retval " " link " " mlink)
397 d("href(\"\"): " link)
398 } else if (lowerodp ~ /href=[ \t]*'[^']*'/) {
399 sub(/[hH][rR][eE][fF]=[ \t]*'/,"href='",odp)
400 match(odp,/href='[^']*'/)
401 link=substr(odp,RSTART,RLENGTH)
402 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
403 link=substr(link,7,length(link)-7)
404 link=postfix_link(url, link)
406 if (link_seen(link)) {
411 retval = (retval " " link)
413 } else if (lowerodp ~ /href=[ \t]*[^ \t>]*/) {
414 sub(/[hH][rR][eE][fF]=[ \t]*/,"href=",odp)
415 match(odp,/href=[^ \t>]*/)
416 link=substr(odp,RSTART,RLENGTH)
417 odp=substr(odp,1,RSTART) substr(odp,RSTART+RLENGTH)
418 link=substr(link,6,length(link)-5)
420 if (link_seen(link)) {
425 retval = (retval " " link)
428 # <a ...> but not href - skip
429 d("skipping <a > without href: " odp)
433 d("Returning: [" retval "]")
437 function subst_defines(var,defs) {
438 # substitute all possible RPM macros
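# illustrative call (hypothetical defs contents): with defs["name"]="foo" and
# defs["version"]="1.0",
#   subst_defines("%{name}-%{version}.tar.gz", defs) -> "foo-1.0.tar.gz"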
439 while ((var ~ /%{.*}/) || (var ~ /%[A-Za-z0-9_]+/)) {
442 gsub("%{" j "}", defs[j], var)
443 gsub("%" j , defs[j], var)
457 function find_mirror(url) {
459 while (succ = (getline line < "mirrors")) {
460 if (succ==-1) { return url }
461 nf=split(line,fields,"|")
466 prefix=substr(url,1,length(origin))
468 d("Mirror found at " mname)
470 return mirror substr(url,length(origin)+1)
478 # fetches the file list and compares version numbers
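# illustrative decomposition (hypothetical URL; intermediate values approximate):
#   lurl = "http://example.org/dist/foo-1.2.3.tar.gz", version = "1.2.3"
#   dir = "/dist/", filename = "foo-1.2.3" (archive extension stripped)
#   prever = "foo-", postver = "", filenameexp = "foo-" plus a version character class
# the fetched directory listing is then scanned for links matching filenameexp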
479 function process_source(number, lurl, name, version) {
480 d("Processing " lurl)
482 if (index(lurl, version) == 0) {
483 d("There is no version number ["version"] in ["lurl"]")
489 gsub("[^/]*$",":&",lurl)
496 if (index(dir,version)) {
497 # directory-name-as-version matching mode:
498 # if /something/version/name-version.tarball then check
499 # in /something/, looking for a newer directory
500 dir=substr(dir,1,index(dir,version)-1)
502 sub("(\.tar\.(bz|bz2|gz|lzma|xz)|zip)$","",filename)
505 d("Will check a directory: " dir)
506 d("and a file: " filename)
509 gsub("[+]","\\+",filenameexp)
510 sub(version,"[A-Za-z0-9.]+",filenameexp)
511 gsub("[.]","\\.",filenameexp)
512 d("Expression: " filenameexp)
513 match(filename,version)
514 prever=substr(filename,1,RSTART-1)
515 postver=substr(filename,RSTART+RLENGTH)
516 d("Before number: " prever)
517 d("and after: " postver)
518 newurl=find_mirror(acc "://" host dir)
519 #print acc "://" host dir
520 #newurl=url[1]"://"url[2]url[3]url[4]
521 #newurl=acc "://" host dir filename
522 d("Looking at " newurl)
527 odp = get_links(newurl, filename)
528 if( odp ~ "ERROR: ") {
529 print name "(" number ") " odp
531 d("WebPage downloaded")
533 for (nr=1; nr<=c; nr++) {
536 d("Found link: " addr)
538 # github has very different tarball links that clash with this safe check
539 if (!(newurl ~/^(http|https):\/\/github.com\/.*\/tarball/)) {
540 if (addr ~ "[-_.0-9A-Za-z~]" filenameexp) {
545 if (addr ~ filenameexp) {
546 match(addr,filenameexp)
547 newfilename=substr(addr,RSTART,RLENGTH)
548 d("Hypothetical new: " newfilename)
549 newfilename=fixedsub(prever,"",newfilename)
550 newfilename=fixedsub(postver,"",newfilename)
551 d("Version: " newfilename)
552 if (newfilename ~ /\.(asc|sig|pkg|bin|binary|built)$/) continue
553 # strip the archive extension (happens in directory-name-as-version matching mode)
554 sub("(\\.tar\\.(bz|bz2|gz|lzma|xz)|zip)$","",newfilename)
556 if ( compare_ver_dec(version, newfilename)==1 ) {
557 d("Yes, there is new one")
561 } else if ( compare_ver(version, newfilename)==1 ) {
562 d("Yes, there is new one")
569 print name "(" number ") seems ok: " oldversion
571 print name "(" number ") [OLD] " oldversion " [NEW] " version
575 function rss_upgrade(name, ver, url, regex, cmd) {
576 regex = "s/.*<title>" regex "<\/title>.*/\\1/p"
577 cmd = "wget -t 2 -T 45 -q -O - " url " | sed -nre '" regex "' | head -n1"
579 d("rss_upgrade_cmd: " cmd)
586 # check for ZF upgrade from rss
587 function zf_upgrade(name, ver) {
588 return rss_upgrade(name, ver, \
589 "http://devzone.zend.com/tag/Zend_Framework_Management/format/rss2.0", \
590 "Zend Framework ([^\\s]+) Released" \
594 function hudson_upgrade(name, ver) {
595 return rss_upgrade(name, ver, \
596 "https://hudson.dev.java.net/servlets/ProjectRSS?type=news", \
597 "Hudson ([0-9.]+) released" \
601 # upgrade check for pear package using PEAR CLI
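# assumption about the PEAR CLI output: "pear remote-info <package>" prints a line
# starting with "Latest" whose last whitespace-separated field is the newest version;
# the embedded awk below keeps only that field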
602 function pear_upgrade(name, ver, cmd) {
603 sub(/^php-pear-/, "", name);
605 cmd = "pear remote-info " name " | awk '/^Latest/{print $NF}'"
613 function vim_upgrade(name, ver, cmd) {
614 # %patchset_source -f ftp://ftp.vim.org/pub/editors/vim/patches/7.2/7.2.%03g 1 %{patchlevel}
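# assumed MD5SUMS layout (standard md5sum output, "<checksum>  <file>" per line):
# the pipeline below drops .gz entries, takes the last line and prints its second
# field, e.g. "7.4.123", which is treated as the newest patchlevel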
615 cmd = "wget -q -O - ftp://ftp.vim.org/pub/editors/vim/patches/" DEFS["ver"] "/MD5SUMS|grep -vF .gz|tail -n1|awk '{print $2}'"
622 function nodejs_upgrade(name, ver, cmd) {
623 d("NODEJS " name " (as " DEFS["pkg"] ") " ver);
625 cmd = "npm info " DEFS["pkg"] " dist-tags.latest"
627 cmd = "npm info " name " dist-tags.latest"
635 function chrome_upgrade(name, ver, cmd, sourceurl) {
636 sourceurl = "http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/primary.xml.gz"
637 cmd = "curl -s " sourceurl " | zcat | perl -ne 'm{<name>google-chrome-" DEFS["state"] "</name>} and m{<version .*ver=.([\d.]+)} and print $1'"
645 function process_data(name, ver, rel, src, nver) {
646 if (name ~ /^php-pear-/) {
647 nver = pear_upgrade(name, ver);
648 } else if (name == "ZendFramework") {
649 nver = zf_upgrade(name, ver);
650 } else if (name == "hudson") {
651 nver = hudson_upgrade(name, ver);
652 } else if (name == "vim") {
653 nver = vim_upgrade(name, ver);
654 } else if (name == "google-chrome") {
655 nver = chrome_upgrade(name, ver);
656 } else if (name ~ "^nodejs-") {
657 nver = nodejs_upgrade(name, ver);
661 if (compare_ver(ver, nver)) {
662 print name " [OLD] " ver " [NEW] " nver
664 print name " seems ok: " ver
669 if (name == "xulrunner") {
670 ver = subst_defines(DEFS["firefox_ver"], DEFS)
671 d("package xulrunner, change version to firefox ["ver"]")
674 # this function checks whether the macro substitutions were valid and, if so,
675 # processes each URL and tries to get the current file list
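# illustrative case (hypothetical spec): "Source0: http://example.org/%{name}-%{version}.tar.gz"
# expands via DEFS and is then checked; a source that still contains an unexpanded
# %{...} macro after substitution is reported as an impossible substitution instead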
677 if (src[i] ~ /%{nil}/) {
678 gsub(/\%\{nil\}/, "", src[i])
680 if ( src[i] !~ /%{.*}/ && src[i] !~ /%[A-Za-z0-9_]/ ) {
682 process_source(i, src[i], name, ver)
684 print FNAME ":" i ": impossible substitution: " src[i]
690 # if you want to use DEBUG, run script with "-v DEBUG=1"
691 # or uncomment the line below
694 errno=system("wget --help > /dev/null 2>&1")
695 if (errno && errno != 3) {
696 print "No wget installed!"
699 if (ARGC>=3 && ARGV[2]=="-n") {
701 for (i=3; i<ARGC; i++) ARGV[i-1]=ARGV[i]
708 # clean frameseen for each ARG
709 for (i in frameseen) {
714 process_data(NAME,VER,REL,SRC)
715 NAME="" ; VER="" ; REL=""
716 for (i in DEFS) delete DEFS[i]
717 for (i in SRC) delete SRC[i]
720 DEFS["_alt_kernel"]=""
725 /^[Uu][Rr][Ll]:/&&(URL=="") { URL=subst_defines($2,DEFS) ; DEFS["url"]=URL }
726 /^[Nn]ame:/&&(NAME=="") { NAME=subst_defines($2,DEFS) ; DEFS["name"]=NAME }
727 /^[Vv]ersion:/&&(VER=="") { VER=subst_defines($2,DEFS) ; DEFS["version"]=VER }
728 /^[Rr]elease:/&&(REL=="") { REL=subst_defines($2,DEFS) ; DEFS["release"]=REL }
729 /^[Ss]ource[0-9]*:/ { if (/(ftp|http|https):\/\//) SRC[FNR]=subst_defines($2,DEFS) }
730 /%define/ { DEFS[$2]=subst_defines($3,DEFS) }
733 process_data(NAME,VER,REL,SRC)