Python

Get the HTML page with "urllib" and feed it to BeautifulSoup.
Then you can parse it with, for example, soup.head or soup.head.title, or print the HTML code in a pretty way with soup.prettify().
soup.findAll("a") finds all anchors. See also findNextSiblings(), findPreviousSiblings() and findParents().

GConf: monitor a certain value with Python

import gconf
import gobject

class GConfExample:

  def __init__(self):
     self.__client = gconf.client_get_default()
     # watch the directory that contains the key we are interested in
     self.__client.add_dir("/desktop/gnome/background", gconf.CLIENT_PRELOAD_NONE)
     # run __background_cb every time the key changes
     self.__client.notify_add("/desktop/gnome/background/picture_filename", self.__background_cb)
     self.__background_cb(self.__client)

  def __background_cb(self, client, *args, **kwargs):
     fname = client.get_string("/desktop/gnome/background/picture_filename")
     print("new background image: %s\n" % fname)

if __name__ == "__main__":
   GConfExample()
   main = gobject.MainLoop()
   main.run()
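
To see the callback fire, change the watched key from another process; a minimal sketch assuming the same python-gconf bindings (the image path is just an example):

 import gconf

 client = gconf.client_get_default()
 # writing the key triggers the notify_add callback in the running example
 client.set_string("/desktop/gnome/background/picture_filename",
                   "/usr/share/backgrounds/example.png")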

PyGoogle

  1. get a license key here
  2. download PyGoogle here
  3. get started:
 #!/usr/bin/env python
 import sys, codecs
 import SOAP, google

 google.LICENSE_KEY = 'yourlicensekey'

 def switchUTF8():
     # wrap stdout in a UTF-8 writer so non-ASCII results print cleanly
     sys.stdout = codecs.lookup('utf-8')[-1](sys.stdout)

 def search(query):
     data = google.doGoogleSearch(query)
     return data

 if __name__ == '__main__':
     switchUTF8()
     data = search(sys.argv[1])
     print
     for res in data.results:
         print ' '.join([res.title, res.URL, res.snippet, '\n'])
     print
  1. to get the meta data, add a helper like this:
 def getMetaInfo(data):
     print 'search time:', data.meta.searchTime
     print 'query      :', data.meta.searchQuery
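
A quick usage sketch tying the pieces together (the query string is just an example; getMetaInfo is the helper defined above and assumed to be in scope):

 import google
 google.LICENSE_KEY = 'yourlicensekey'

 data = google.doGoogleSearch('monty python')   # example query
 for res in data.results:
     print res.title, res.URL                   # title and URL of each hit
 getMetaInfo(data)                              # search time and query string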