]> git.pld-linux.org Git - packages/rpm-build-tools.git/blobdiff - pldnotify.awk
- ejabberd url
[packages/rpm-build-tools.git] / pldnotify.awk
old mode 100644 (file)
new mode 100755 (executable)
index 7b16cae..1f403c6
@@ -1,7 +1,6 @@
 #!/bin/awk -f
-# $Revision$, $Date$
 #
-# Copyright (C) 2000-2010 PLD-Team <feedback@pld-linux.org>
+# Copyright (C) 2000-2013 PLD-Team <feedback@pld-linux.org>
 # Authors:
 #      Sebastian Zagrodzki <zagrodzki@pld-linux.org>
 #      Jacek Konieczny <jajcus@pld-linux.org>
 # - "SourceXActiveFTP" support
 # - support debian/watch http://wiki.debian.org/debian/watch/
 
+# NOTE:
+# to test this, run:
+# $ awk -vDEBUG=1 -f pldnotify.awk < specfile
+#
+# To get the most out of it, you need to have the following tools installed:
+# - perl, sed, wget, coreutils, util-linux
+# - perl-HTML-Tree (HTML::TreeBuilder module) for better links parser (-vUSE_PERL=0 to disable)
+# - pear (php-pear-PEAR) for php-pear package updates
+# - npm for nodejs packages
+# - gem (ruby-rubygems) for ruby/rubygem packages
+# 
+# Additionally, a "mirrors" file in the current dir controls which local mirrors you prefer
 
 function d(s) {
        if (!DEBUG) {
                return
        }
+
+#      print strftime("%Y-%m-%d %H:%M:%S ") s >> "/dev/stderr"
        print s >> "/dev/stderr"
 }
 
@@ -179,16 +192,38 @@ function mktemp(   _cmd, _tmpfile) {
 }
 
 # fix link to artificial one that will be recognized rest of this script
-function postfix_link(url, link) {
+function postfix_link(url, link,   oldlink) {
        oldlink = link
        if ((url ~/^(http|https):\/\/github.com\//) && (link ~ /.*\/tarball\//)) {
                gsub(".*\/tarball\/", "", link)
                link = link ".tar.gz"
        }
-       d("POST FIXING URL [ " oldlink " ] to [ " link " ]")
+       if (oldlink != link) {
+               d("POST FIXED URL [ " oldlink " ] to [ " link " ]")
+       }
        return link
 }
 
+# use perl HTML::TreeBuilder module to extract links from html
+# it returns TAGNAME LINK in output which is pretty straightforward to parse in awk
+function extract_links_cmd(tmpfile) {
+       return "perl -MHTML::TreeBuilder -e ' \
+       my $content = join q//, <>; \
+       my $root = new HTML::TreeBuilder; \
+       $root->parse($content); \
+       \
+       my %links = (); \
+       for (@{$root->extract_links(qw(a iframe))}) { \
+               my($link, $element, $attr, $tag) = @$_; \
+               $links{$link} = $tag; \
+       } \
+       \
+       while (my($link, $tag) = each %links) { \
+               print $tag, q/ /, $link, $/; \
+       } \
+       ' " tmpfile
+}
+
 # get all <A HREF=..> tags from specified URL
 function get_links(url,filename,   errno,link,oneline,retval,odp,wholeodp,lowerodp,tmpfile,cmd) {
 
@@ -197,11 +232,9 @@ function get_links(url,filename,   errno,link,oneline,retval,odp,wholeodp,lowero
        tmpfile = mktemp()
        tmpfileerr = mktemp()
 
-       if (url ~ /^http:\/\/(download|downloads|dl)\.(sf|sourceforge)\.net\//) {
+       if (url ~ /^http:\/\/(download|dl)\.(sf|sourceforge)\.net\//) {
                # http://downloads.sourceforge.net/project/mediainfo/source/mediainfo/
                gsub("^http://(download|dl)\.(sf|sourceforge)\.net/", "", url)
-               # http://downloads.sourceforge.net/project/mediainfo/source/mediainfo/
-               gsub("^http://downloads\.(sf|sourceforge)\.net/project/", "", url)
 
                gsub("/.*", "", url)
                url = "http://sourceforge.net/projects/" url "/files/"
@@ -245,42 +278,92 @@ function get_links(url,filename,   errno,link,oneline,retval,odp,wholeodp,lowero
                d("github tarball url, mungled url to: " url)
        }
 
+       if (url ~/^(http|https):\/\/github.com\/.*\/(.*)\/archive\//) {
+               gsub("\/archive\/.*", "/tags", url)
+               d("github archive url, mungled url to: " url)
+       }
+
        if (url ~/^(http|https):\/\/cgit\..*\/(.*)\/snapshot\//) {
                gsub("\/snapshot\/.*", "/", url)
                d("cgit snapshot tarball url, mungled url to: " url)
        }
 
+       if (url ~/^(http|https):\/\/www2\.aquamaniac\.de\/sites\/download\//) {
+               url = "http://www2.aquamaniac.de/sites/download/packages.php"
+               d("aquamaniac.de tarball url, mungled url to: " url)
+       }
+
+       if (url ~/^(http|https):\/\/www.process-one.net\/downloads\/ejabberd\//) {
+               url = "http://www.process-one.net/en/ejabberd/archive/"
+               d("ejabberd tarball url, mungled url to: " url)
+       }
+
        d("Retrieving: " url)
-       cmd = "wget --user-agent \"Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100129 PLD/3.0 (Th) Iceweasel/3.6\" -nv -O - \"" url "\" -t 2 -T 45 --passive-ftp --no-check-certificate > " tmpfile " 2> " tmpfileerr
+       user_agent = "Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.2) Gecko/20100129 PLD/3.0 (Th) Iceweasel/3.6"
+       cmd = "wget -t 2 -T 45 --user-agent \"" user_agent "\" -nv -O - \"" url "\" --passive-ftp --no-check-certificate > " tmpfile " 2> " tmpfileerr
        d("Execute: " cmd)
        errno = system(cmd)
        d("Execute done")
 
-       if (errno==0) {
-               wholeodp = ""
-               d("Reading success response...")
-               while (getline oneline < tmpfile)
-                       wholeodp = (wholeodp " " oneline)
-#                      d("Response: " wholeodp)
-       } else {
+       if (errno != 0) {
                d("Reading failure response...")
                wholeerr = ""
                while (getline oneline < tmpfileerr)
-                       wholeerr=(wholeerr " " oneline)
+                       wholeerr = (wholeerr " " oneline)
                d("Error Response: " wholeerr)
-       }
-
-       system("rm -f " tmpfile)
-       system("rm -f " tmpfileerr)
 
-       if (errno != 0) {
+               system("rm -f " tmpfile)
+               system("rm -f " tmpfileerr)
                retval = ("WGET ERROR: " errno ": " wholeerr)
                return retval
        }
+       system("rm -f " tmpfileerr)
 
        urldir = url;
        sub(/[^\/]+$/, "", urldir)
 
+if (USE_PERL) {
+       cmd = extract_links_cmd(tmpfile)
+       while (cmd | getline) {
+               tag = $1
+               link = substr($0, length(tag) + 2)
+
+               if (tag == "iframe") {
+                       d("Frame: " link)
+                       if (url !~ /\//) {
+                               link = (urldir link)
+                               d("Frame->: " link)
+                       }
+
+                       if (link_seen(link)) {
+                               continue
+                       }
+                       retval = (retval " " get_links(link))
+               }
+
+               if (link_seen(link)) {
+                       continue
+               }
+
+               retval = (retval " " link)
+               d("href(): " link)
+       }
+       close(cmd)
+       system("rm -f " tmpfile)
+
+       d("Returning: [" retval "]")
+       return retval
+}
+
+       wholeodp = ""
+       d("Reading success response...")
+       while (getline oneline < tmpfile) {
+               wholeodp = (wholeodp " " oneline)
+#              d("Response: " wholeodp)
+       }
+       d("Reponse read done...")
+       system("rm -f " tmpfile)
+
        while (match(wholeodp, /<([aA]|[fF][rR][aA][mM][eE])[ \t][^>]*>/) > 0) {
                d("Processing links...")
                odp = substr(wholeodp,RSTART,RLENGTH);
@@ -316,7 +399,12 @@ function get_links(url,filename,   errno,link,oneline,retval,odp,wholeodp,lowero
                                continue
                        }
 
-                       retval = (retval " " link)
+                       # link ends with at least 2 digit version
+                       mlink = ""
+                       if (link ~ /^.*\/[v]*[0-9\.]+[0-9]\/$/)
+                               mlink = get_links(link)
+
+                       retval = (retval " " link " " mlink)
                        d("href(\"\"): " link)
                } else if (lowerodp ~ /href=[ \t]*'[^']*'/) {
                        sub(/[hH][rR][eE][fF]=[ \t]*'/,"href='",odp)
@@ -353,7 +441,7 @@ function get_links(url,filename,   errno,link,oneline,retval,odp,wholeodp,lowero
                }
        }
 
-       d("Returning: " retval)
+       d("Returning: [" retval "]")
        return retval
 }
 
@@ -388,7 +476,7 @@ function find_mirror(url) {
                        mname=fields[3]
                        prefix=substr(url,1,length(origin))
                        if (prefix==origin){
-                               d("Mirror fount at " mname)
+                               d("Mirror found at " mname)
                                close("mirrors")
                                return mirror substr(url,length(origin)+1)
                        }
@@ -398,12 +486,12 @@ function find_mirror(url) {
        return url
 }
 
-function process_source(number,lurl,name,version) {
 # fetches file list, and compares version numbers
+function process_source(number, lurl, name, version) {
        d("Processing " lurl)
 
-       if ( index(lurl,version)==0 ) {
-               d("There is no version number.")
+       if (index(lurl, version) == 0) {
+               d("There is no version number ["version"] in ["lurl"]")
                return 0
        }
 
@@ -429,9 +517,9 @@ function process_source(number,lurl,name,version) {
        d("and a file: " filename)
 
        filenameexp=filename
-       gsub("\+","\\+",filenameexp)
+       gsub("[+]","\\+",filenameexp)
        sub(version,"[A-Za-z0-9.]+",filenameexp)
-       gsub("\.","\\.",filenameexp)
+       gsub("[.]","\\.",filenameexp)
        d("Expression: " filenameexp)
        match(filename,version)
        prever=substr(filename,1,RSTART-1)
@@ -488,100 +576,147 @@ function process_source(number,lurl,name,version) {
                                }
                        }
                }
-               if (finished==0)
+               if (finished == 0)
                        print name "(" number ") seems ok: " oldversion
                else
                        print name "(" number ") [OLD] " oldversion " [NEW] " version
        }
 }
 
-function rss_upgrade(name, ver, url, regex, cmd, nver) {
+function rss_upgrade(name, ver, url, regex, cmd) {
        regex = "s/.*<title>" regex "<\/title>.*/\\1/p"
-       cmd = "wget -q -O - " url " | sed -nre '" regex "' | head -n1"
+       cmd = "wget -t 2 -T 45 -q -O - " url " | sed -nre '" regex "' | head -n1"
 
        d("rss_upgrade_cmd: " cmd)
-       cmd | getline nver
+       cmd | getline ver
        close(cmd)
 
-       if (compare_ver(ver, nver)) {
-               print name " [OLD] " ver " [NEW] " nver
-       } else {
-               print name " seems ok: " ver
-       }
+       return ver
 }
 
 # check for ZF upgrade from rss
 function zf_upgrade(name, ver) {
-       rss_upgrade(name, ver, \
+       return rss_upgrade(name, ver, \
                "http://devzone.zend.com/tag/Zend_Framework_Management/format/rss2.0", \
                "Zend Framework ([^\\s]+) Released" \
        );
 }
 
 function hudson_upgrade(name, ver) {
-       rss_upgrade(name, ver, \
+       return rss_upgrade(name, ver, \
                "https://hudson.dev.java.net/servlets/ProjectRSS?type=news", \
                "Hudson ([0-9.]+) released" \
        );
 }
 
 # upgrade check for pear package using PEAR CLI
-function pear_upgrade(name, ver,    pname, pearcmd, nver) {
-       pname = name;
-       sub(/^php-pear-/, "", pname);
+function pear_upgrade(name, ver,    cmd) {
+       sub(/^php-pear-/, "", name);
 
-       pearcmd = "pear remote-info " pname " | awk '/^Latest/{print $NF}'"
-       d("pearcmd: " pearcmd)
-       pearcmd | getline nver
-       close(pearcmd)
+       cmd = "pear remote-info " name " | awk '/^Latest/{print $NF}'"
+       d("PEAR: " cmd)
+       cmd | getline ver
+       close(cmd)
+
+       return ver
+}
+
+function vim_upgrade(name, ver,     cmd) {
+       # %patchset_source -f ftp://ftp.vim.org/pub/editors/vim/patches/7.2/7.2.%03g 1 %{patchlevel}
+       cmd = "wget -q -O - ftp://ftp.vim.org/pub/editors/vim/patches/" DEFS["ver"] "/MD5SUMS|grep -vF .gz|tail -n1|awk '{print $2}'"
+       d("VIM: " cmd)
+       cmd | getline ver
+       close(cmd)
+       return ver
+}
 
-       if (compare_ver(ver, nver)) {
-               print name " [OLD] " ver " [NEW] " nver
+function nodejs_upgrade(name, ver,   cmd) {
+       d("NODEJS " name " (as " DEFS["pkg"] ") " ver);
+       if (DEFS["pkg"]) {
+               cmd = "npm info " DEFS["pkg"] " dist-tags.latest"
        } else {
-               print name " seems ok: " ver
+               cmd = "npm info " name " dist-tags.latest"
        }
+       cmd | getline ver
+       close(cmd)
 
-       return
+       return ver
 }
 
-function vim_upgrade(name, ver,     mver, nver, vimcmd) {
-       # %patchset_source -f ftp://ftp.vim.org/pub/editors/vim/patches/7.2/7.2.%03g 1 %{patchlevel}
-       mver = substr(ver, 0, 4)
-       vimcmd = "wget -q -O - ftp://ftp.vim.org/pub/editors/vim/patches/"mver"/MD5SUMS|grep -vF .gz|tail -n1|awk '{print $2}'"
-       d("vimcmd: " vimcmd)
-       vimcmd | getline nver
-       close(vimcmd)
-
-       if (compare_ver(ver, nver)) {
-               print name " [OLD] " ver " [NEW] " nver
+function rubygem_upgrade(name, ver,   cmd, pkg) {
+       if (DEFS["gem_name"]) {
+               pkg = DEFS["gem_name"];
+
+       } else if (DEFS["gemname"]) {
+               pkg = DEFS["gemname"];
+
+       } else if (DEFS["pkgname"]) {
+               pkg = DEFS["pkgname"];
+
        } else {
-               print name " seems ok: " ver
+               pkg = name;
+               gsub(/^ruby-/, "", pkg);
        }
+
+       cmd = "gem list --remote '^" pkg "$' | awk '/" pkg "/ {v=$2; sub(/\(/, \"\", v); print v}'"
+       d("RUBYGEM " name " (as " pkg ") " ver ": " cmd);
+       cmd | getline ver
+
+       close(cmd)
+
+       return ver
 }
 
-function process_data(name,ver,rel,src) {
+function chrome_upgrade(name, ver,   cmd, sourceurl) {
+       sourceurl = "http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/primary.xml.gz"
+       cmd = "curl -s " sourceurl " | zcat | perl -ne 'm{<name>google-chrome-" DEFS["state"] "</name>} and m{<version .*ver=.([\d.]+)} and print $1'"
+       d("CHROME " cmd);
+       cmd | getline ver
+       close(cmd)
+
+       return ver
+}
+
+function process_data(name, ver, rel, src,   nver) {
        if (name ~ /^php-pear-/) {
-               return pear_upgrade(name, ver);
-       }
-       if (name == "ZendFramework") {
-               return zf_upgrade(name, ver);
-       }
-       if (name == "hudson") {
-               return hudson_upgrade(name, ver);
+               nver = pear_upgrade(name, ver);
+       } else if (name == "ZendFramework") {
+               nver = zf_upgrade(name, ver);
+       } else if (name == "hudson") {
+               nver = hudson_upgrade(name, ver);
+       } else if (name == "vim") {
+               nver = vim_upgrade(name, ver);
+       } else if (name == "google-chrome") {
+               nver = chrome_upgrade(name, ver);
+       } else if (name ~ "^nodejs-") {
+               nver = nodejs_upgrade(name, ver);
+       } else if (name ~ "^ruby-") {
+               nver = rubygem_upgrade(name, ver);
+       }
+
+       if (nver) {
+               if (compare_ver(ver, nver)) {
+                       print name " [OLD] " ver " [NEW] " nver
+               } else {
+                       print name " seems ok: " ver
+               }
+               return;
        }
-       if (name == "vim") {
-               return vim_upgrade(name, ver);
+
+       if (name == "xulrunner") {
+               ver = subst_defines(DEFS["firefox_ver"], DEFS)
+               d("package xulrunner, change version to firefox ["ver"]")
        }
 
 # this function checks if substitutions were valid, and if true:
 # processes each URL and tries to get current file list
        for (i in src) {
-               if ( src[i] ~ /%{nil}/ ) {
+               if (src[i] ~ /%{nil}/) {
                        gsub(/\%\{nil\}/, "", src[i])
                }
                if ( src[i] !~ /%{.*}/ && src[i] !~ /%[A-Za-z0-9_]/ )  {
                        d("Source: " src[i])
-                       process_source(i,src[i],name,ver)
+                       process_source(i, src[i], name, ver)
                } else {
                        print FNAME ":" i ": impossible substitution: " src[i]
                }
@@ -589,12 +724,12 @@ function process_data(name,ver,rel,src) {
 }
 
 BEGIN {
-       # if U want to use DEBUG, run script with "-v DEBUG=1"
+       # if you want to use DEBUG, run script with "-v DEBUG=1"
        # or uncomment the line below
        # DEBUG = 1
 
        errno=system("wget --help > /dev/null 2>&1")
-       if (errno) {
+       if (errno && errno != 3) {
                print "No wget installed!"
                exit 1
        }
@@ -621,6 +756,7 @@ FNR==1 {
        FNAME=FILENAME
        DEFS["_alt_kernel"]=""
        DEFS["20"]="\\ "
+       DEFS["nil"]=""
 }
 
 /^[Uu][Rr][Ll]:/&&(URL=="") { URL=subst_defines($2,DEFS) ; DEFS["url"]=URL }
This page took 0.157622 seconds and 4 git commands to generate.