Index: action.py
==================================================================
--- action.py
+++ action.py
@@ -29,11 +29,11 @@
 
 
 import re
 import os
 from ahttp import fix_url as http_fix_url, session
-from config import conf, __print__ as debug, dbg
+from config import *
 import platform
 import copy
 import json
 from datetime import datetime
 from xml.sax.saxutils import escape as xmlentities, unescape as xmlunescape
@@ -111,13 +111,13 @@
 
 
 # Exec wrapper
 #
 def run(cmd):
-    debug(dbg.PROC, "Exec:", cmd)
+    log.PROC("Exec:", cmd)
     try:    os.system("start \"%s\"" % cmd if conf.windows else cmd + " &")
-    except: debug(dbg.ERR, "Command not found:", cmd)
+    except: log.ERR("Command not found:", cmd)
 
 
 # Start web browser
 #
 def browser(url):
@@ -207,11 +207,11 @@
 #  · But returns a list of [urls] after playlist extraction.
 #  · If repackaging as .m3u/.pls/.xspf, returns the local [fn].
 #
 def convert_playlist(url, source, dest, local_file=True, row={}):
     urls = []
-    debug(dbg.PROC, "convert_playlist(", url, source, dest, ")")
+    log.PROC("convert_playlist(", url, source, dest, ")")
 
     # Leave alone if format matches, or if already "srv" URL, or if not http (local path, mms:/rtsp:)
     if source == dest or source in ("srv", "href") or not re.match("(https?|spdy)://", url):
         return [url]
     
@@ -227,17 +227,17 @@
     ext = probe_playlist_fn_ext(url)
     probe = probe_playlist_content(cnt)
 
     # Check ambiguity (except pseudo extension)
     if len(set([source, mime, probe])) > 1:
-        debug(dbg.ERR, "Possible playlist format mismatch:", "listformat={}, http_mime={}, rx_probe={}, ext={}".format(source, mime, probe, ext))
+        log.ERR("Possible playlist format mismatch:", "listformat={}, http_mime={}, rx_probe={}, ext={}".format(source, mime, probe, ext))
 
     # Extract URLs from content
     for fmt in [id[0] for id in extract_playlist.extr_urls]:
         if not urls and fmt in (source, mime, probe, ext, "raw"):
             urls = extract_playlist(cnt).format(fmt)
-            debug(dbg.DATA, "conversion from:", source, " with extractor:", fmt, "got URLs=", urls)
+            log.DATA("conversion from:", source, " with extractor:", fmt, "got URLs=", urls)
             
     # Return original, or asis for srv targets
     if not urls:
         return [url]
     elif dest in ("srv", "href"):
@@ -245,11 +245,11 @@
 
     # Otherwise convert to local file
     if local_file:
         fn, is_unique = tmp_fn(cnt, dest)
         with open(fn, "w") as f:
-            debug(dbg.DATA, "exporting with format:", dest, " into filename:", fn)
+            log.DATA("exporting with format:", dest, " into filename:", fn)
             f.write( save_playlist(source="srv", multiply=True).export(urls, row, dest) )
         return [fn]
     else:
         return urls
 
@@ -287,11 +287,11 @@
     # Map MIME to abbr type (pls, m3u, xspf)
     if listfmt_t.get(mime):
         mime = listfmt_t.get(mime)
     # Raw content (mp3, flv)
     elif mediafmt_t.get(mime):
-        debug(dbg.ERR, "Got media MIME type for expected playlist", mime, " on url=", url)
+        log.ERR("Got media MIME type for expected playlist", mime, " on url=", url)
         mime = mediafmt_t.get(mime)
         return (mime, url)
     # Rejoin body
     content = "\n".join(str.decode(errors='replace') for str in r.iter_lines())
     return (mime, content)
@@ -311,11 +311,11 @@
     def __init__(self, text):
         self.src = text
         
     # Extract only URLs from given source type
     def format(self, fmt):
-        debug(dbg.DATA, "input extractor/regex:", fmt, len(self.src))
+        log.DATA("input extractor/regex:", fmt, len(self.src))
 
         # find extractor
         if fmt in dir(self):
             return self.__dict__[fmt]()
 
@@ -411,11 +411,11 @@
                     # Or just allow one stream per station in a playlist entry
                     if not self.multiply:
                         break
             rows = new_rows
 
-        debug(dbg.DATA, "conversion to:", dest, "  with rows=", rows)
+        log.DATA("conversion to:", dest, "  with rows=", rows)
 
         # call conversion schemes
         converter = getattr(self, dest) or self.pls
         return converter(rows)
 

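The convert_playlist() docstring above sums up the contract. Roughly, with placeholder URLs (the HTTP fetch and the final "srv"/"href" return branch fall outside the hunks shown, so this is a hedged sketch rather than verified behaviour):

    convert_playlist("mms://example.net/stream", "asx", "m3u")
    # → [original url]: non-http sources (and matching source==dest) are passed through untouched

    convert_playlist("http://example.net/st.pls", "pls", "srv")
    # → presumably the bare list of extracted stream URLs

    convert_playlist("http://example.net/st.pls", "pls", "m3u")
    # → [filename] of a temporary local playlist written via save_playlist()
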
Index: ahttp.py
==================================================================
--- ahttp.py
+++ ahttp.py
@@ -12,11 +12,11 @@
 # simulating requests too. Hooks into mains gtk.statusbar().
 # And can normalize URLs to always carry a trailing slash
 # after the domain name.
 
 
-from config import conf, __print__, dbg
+from config import *
 import requests
 
 
 #-- hooks to progress meter and status bar in main window
 feedback = None
@@ -52,11 +52,11 @@
 #-- Retrieve data via HTTP
 #
 #  Well, it says "get", but it actually does POST and AJAXish GET requests too.
 #
 def get(url, params={}, referer="", post=0, ajax=0, binary=0, feedback=None, content=True):
-    __print__( dbg.HTTP, "GET", url, params )
+    log.HTTP("GET", url, params )
 
     # statusbar info
     progress_feedback(url)
     
     # combine headers
@@ -74,19 +74,19 @@
     if post:
         r = session.post(url, params=params, headers=headers, timeout=7.5)
     else:    
         r = session.get(url, params=params, headers=headers, timeout=9.75)
 
-    __print__( dbg.HTTP, r.request.headers );
-    __print__( dbg.HTTP, r.headers );
+    log.HTTP(r.request.headers)
+    log.HTTP(r.headers)
             
     # finish, clean statusbar
     #progress_feedback(0.9)
     #progress_feedback("")
 
     # result
-    __print__( dbg.INFO, "Content-Length", len(r.content) )
+    log.INFO("Content-Length", len(r.content) )
     if not content:
         return r
     elif binary:
         return r.content
     else:

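For orientation, this get() wrapper is what the channel plugins below call as http.get(). A hedged usage sketch (URL and parameters are placeholders; the plain-text return branch is cut off above, but presumably yields the decoded body):

    import ahttp as http

    html = http.get("http://example.net/stations")             # plain GET, text body
    js   = http.get(url, params={"page": 2}, post=1, ajax=1)   # POST variant, as in the shoutcast plugin
    raw  = http.get(url, binary=1)                              # returns r.content unchanged
    r    = http.get(url, content=False)                         # returns the requests response object
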
Index: channels/__init__.py
==================================================================
--- channels/__init__.py
+++ channels/__init__.py
@@ -102,11 +102,11 @@
     @property
     def current(self):
         return self.__current
     @current.setter
     def current(self, newcat):
-        __print__(dbg.PROC, "{}.current:={} ← from {}".format(self.module, newcat, [inspect.stack()[x][3] for x in range(1,4)]))
+        log.PROC("{}.current:={} ← from {}".format(self.module, newcat, [inspect.stack()[x][3] for x in range(1,4)]))
         self.__current = newcat
         return self.__current
 
 
     #--------------------------- initialization --------------------------------
@@ -165,11 +165,11 @@
 
 
     # Statusbar stub (defers to parent/main window, if in GUI mode)
     def status(self, *v):
         if self.parent: self.parent.status(*v)
-        else: __print__(dbg.INFO, "status():", *v)
+        else: log.INFO("status():", *v)
 
 
         
     #--------------------- streams/model data accesss ---------------------------
         
@@ -245,11 +245,11 @@
             print "load(None)"
             return
 
         # get data from cache or download
         if force or not category in self.streams:
-            __print__(dbg.PROC, "load", "update_streams")
+            log.PROC("load", "update_streams")
             self.status("Updating streams...")
             self.status(-0.1)
             if category == "empty":
                 new_streams = self.empty_stub
             else:
@@ -263,11 +263,11 @@
                     if len(set(["", None]) & set([row.get("title"), row.get("url")])):
                         continue
                     try:
                         modified.append( self.postprocess(row) )
                     except Exception as e:
-                        __print__(e, dbg.DATA, "Missing title or url. Postprocessing failed:", row)
+                        log.DATA(e, "Missing title or url. Postprocessing failed:", row)
                 new_streams = modified
   
                 # don't lose forgotten streams
                 if conf.retain_deleted:
                    self.streams[category] = new_streams + self.deleted_streams(new_streams, self.streams.get(category,[]))
@@ -279,11 +279,11 @@
   
             else:
                 # parse error
                 self.status("Category parsed empty.")
                 self.streams[category] = self.nothing_found
-                __print__(dbg.INFO, "Oooops, parser returned nothing for category " + category)
+                log.INFO("Oooops, parser returned nothing for category " + category)
                 
         # assign to treeview model
         uikit.do(lambda:uikit.columns(self.gtk_list, self.datamap, self.prepare(self.streams[category])))
 
         # set pointer
@@ -381,35 +381,35 @@
     def first_show(self):
 
         # Already processed
         if (self.shown == 55555):
             return
-        __print__(dbg.PROC, self.module, "→ first_show()", ", current=", self.current, ", categories=", len(self.categories))
+        log.PROC(self.module, "→ first_show()", ", current=", self.current, ", categories=", len(self.categories))
     
         # if category tree is empty, initialize it
         if not self.categories:
-            __print__(dbg.PROC, self.module, "→ first_show() → reload_categories()");
+            log.PROC(self.module, "→ first_show() → reload_categories()")
             try:
                 self.reload_categories()
             except:
-                __print__(dbg.ERR, "HTTP error or extraction failure.")
+                log.ERR("HTTP error or extraction failure.")
                 self.categories = ["empty"]
             self.display_categories()
 
         # Select first category
         if not self.current:
             self.current = self.str_from_struct(self.categories) or None
-            __print__(dbg.STAT, self.module, "→ first_show(); use first category as current =", self.current)
+            log.STAT(self.module, "→ first_show(); use first category as current =", self.current)
             self.shown = 0,
 
         # Show current category in any case
-        __print__(dbg.UI, self.module, "→ first_show(); station list → load(", self.current, ")")
+        log.UI(self.module, "→ first_show(); station list → load(", self.current, ")")
         uikit.do(self.load, self.current)
     
         # put selection/cursor on last position
         if True:#self.shown != None:
-            __print__(dbg.STAT, self.module+".first_show()", "select last known category treelist position =", self.shown)
+            log.STAT(self.module+".first_show()", "select last known category treelist position =", self.shown)
             try:
                 uikit.do(lambda:self.gtk_list.get_selection().select_path(self.shown))
             except:
                 pass
             

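A note on the current.setter hunk above: its log.PROC call embeds the names of the calling functions via inspect.stack(), presumably to make it easier to see which code path changed the category. The idiom, isolated into a helper (name chosen here for illustration only):

    import inspect

    def caller_names(depth=3):
        # function names of the surrounding stack frames, innermost caller first
        return [inspect.stack()[x][3] for x in range(1, depth + 1)]
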
Index: channels/bookmarks.py
==================================================================
--- channels/bookmarks.py
+++ channels/bookmarks.py
@@ -73,11 +73,11 @@
     # all entries just come from "bookmarks.json"
     def cache(self):
         # stream list
         cache = conf.load(self.module)
         if (cache):
-            __print__(dbg.PROC, "load bookmarks.json")
+            log.PROC("load bookmarks.json")
             self.streams = cache
         
 
 
     # save to cache file
@@ -138,11 +138,11 @@
         
     # update bookmarks from freshly loaded streams data
     def heuristic_update(self, updated_channel, updated_category):
 
         if not conf.heuristic_bookmark_update: return
-        __print__(dbg.PROC, "heuristic bookmark update")
+        log.PROC("heuristic bookmark update")
         save = 0
         fav = self.streams["favourite"]
 
         # First we'll generate a list of current bookmark stream urls, and then
         # remove all but those from the currently UPDATED_channel + category.

Index: channels/configwin.py
==================================================================
--- channels/configwin.py
+++ channels/configwin.py
@@ -65,11 +65,11 @@
                 elif type(w) is gtk.ListStore:
                     w.clear()
                     for k,v in val.items():
                         w.append([k, v, True, self.app_bin_check(v)])
                     w.append(["", "", True, gtk.STOCK_NEW])
-            __print__(dbg.CONF, "config load", prefix+key, val, type(w))
+            #log.CONF("config load", prefix+key, val, type(w))
 
     # Store gtk widget valus back into conf. dict
     def save_config(self, config, prefix="config_", save=0):
         for key,val in config.items():
             w = self.main.get_widget(prefix + key)
@@ -87,11 +87,11 @@
                 elif type(w) is gtk.ListStore:
                     config[key] = {}
                     for row in w:
                         if row[0] and row[1]:
                             config[key][row[0]] = row[1]
-            __print__(dbg.CONF, "config save", prefix+key, val)
+            log.CONF("config save", prefix+key, val)
     
     
     # Generic Gtk callback to update ListStore when entries get edited.
     # (The main signal_connect() dict prepares individual lambda funcs
     # for each ListStore column id.)

Index: channels/dnd.py
==================================================================
--- channels/dnd.py
+++ channels/dnd.py
@@ -4,13 +4,14 @@
 # description: Copy streams/stations from and to other applications.
 # depends: uikit
 # version: 0.5
 # type: interface
 # config:
-#   { name: dnd_format, type: select, value: xspf, select: "pls|m3u|xspf|jspf|asx|smil", description: "Default temporary file format for copying a station entry." }
+#   { name: dnd_format, type: select, value: xspf, select: "pls|m3u|xspf|jspf|asx|smil", description: "Default temporary file format for copying a station." }
 # category: ui
-# priority: experimental
+# priority: default
+# support: experimental
 #
 # Implements Gtk/X11 drag and drop support for station lists.
 # Should allow to export either just stream URLs, or complete
 # PLS, XSPF collections.
 #

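The dnd.py header above introduces a separate support: key, so the maturity flag no longer overloads priority:. A plugin comment block following the new scheme would presumably look like this (title and description are placeholders):

    # title: Example plugin
    # description: Illustrates the split meta fields only.
    # type: interface
    # category: ui
    # priority: default
    # support: experimental
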
Index: channels/exportcat.py
==================================================================
--- channels/exportcat.py
+++ channels/exportcat.py
@@ -44,15 +44,15 @@
     def savewindow(self, *w):
         cn = self.parent.channel()
         source = cn.listformat
         streams = cn.streams[cn.current]
         fn = uikit.save_file("Export category", None, "%s.%s.%s" % (cn.module, cn.current, conf.export_format))
-        __print__(dbg.PROC, "Exporting category to", fn)
+        log.PROC("Exporting category to", fn)
         if fn:
             dest = re.findall("\.(m3u8?|pls|xspf|jspf|json|smil|asx)8?$", fn.lower())
             if dest:
                 dest = dest[0]
             else:
                 self.parent.status("Unsupported export playlist type (file extension).")
                 return
             action.save_playlist(source="asis", multiply=False).file(rows=streams, fn=fn, dest=dest)
         pass            

Index: channels/file.py
==================================================================
--- channels/file.py
+++ channels/file.py
@@ -25,14 +25,14 @@
 try:
     from mutagen import File as get_meta
 except:
     try:
         from ID3 import ID3
-        __print__(dbg.INFO, "Just basic ID3 support")
+        log.INFO("Just basic ID3 support")
         get_meta = lambda fn: dict([(k.lower(),v) for k,v in ID3(fn).iteritems()])
     except:
-        __print__(dbg.INIT, "You are out of luck in regards to mp3 browsing. No ID3 support.")
+        log.INIT("You are out of luck regarding mp3 browsing. No ID3 support.")
         get_meta = lambda *x: {}
 
 
 # work around mutagens difficult interface
 def mutagen_postprocess(d):

Index: channels/global_key.py
==================================================================
--- channels/global_key.py
+++ channels/global_key.py
@@ -41,11 +41,11 @@
         conf.add_plugin_defaults(self.meta, self.module)
         try:
             for i,keyname in enumerate(conf.switch_key.split(",")):    # allow multiple keys
                 keybinder.bind(keyname, self.switch, ((-1 if i else +1)))   # forward +1 or backward -1
         except:
-            __print__(dbg.ERR, "plugin global_key: Key `%s` could not be registered" % conf.switch_key)
+            log.ERR("plugin global_key: Key `%s` could not be registered" % conf.switch_key)
     
         
     # key event
     def switch(self, num, *any):
         

Index: channels/icast.py
==================================================================
--- channels/icast.py
+++ channels/icast.py
@@ -33,11 +33,11 @@
 # collect 200 station entries (see main options).
 
 
 import re
 import json
-from config import conf, dbg, __print__
+from config import *
 from channels import *
 import ahttp as http
 
 
 # Surfmusik sharing site
@@ -89,8 +89,8 @@
             if len(r) >= data["meta"]["total_count"] or len(data["stations"]) < 10:
                 break
             else:
                 params["page"] = int(data["meta"]["current_page"]) + 1
                 self.parent.status(params["page"] * 9.5 / float(conf.max_streams))
-            #__print__(dbg.DATA, data)
+            #log.DATA(data)
         return r
 

Index: channels/internet_radio.py
==================================================================
--- channels/internet_radio.py
+++ channels/internet_radio.py
@@ -26,11 +26,11 @@
 #
 
 
 from channels import *
 import re
-from config import conf, __print__, dbg
+from config import *
 import ahttp as http
 from pq import pq
 
 
 
@@ -82,27 +82,27 @@
             if str(page+1) not in rx_pages.findall(html[-1]):
                 break
             self.parent.status(float(page)/float(max_pages+1))
 
         # Alternatively try regex or pyquery parsing
-        #__print__(dbg.HTTP, html)
+        #log.HTTP(html)
         for use_rx in [not conf.pyquery, conf.pyquery]:
             try:
                 entries = (self.with_regex(html) if use_rx else self.with_dom(html))
                 if len(entries):
                     break
             except Exception as e:
-                __print__(dbg.ERR, e)
+                log.ERR(e)
                 continue
             
         # fin
         return entries
 
 
     # Regex extraction
     def with_regex(self, html):
-        __print__(dbg.PROC, "internet-radio, regex")
+        log.PROC("internet-radio, regex")
         r = []
         html = "\n".join(html)
         
         # Break up into <tr> blocks before extracting bits
         rx_tr = re.compile("""<tr[^>]*>(.+?)</tr>""", re.S)
@@ -116,11 +116,11 @@
                .*?   (\d+)\s*Kbps
         """, re.S|re.X)
 
         for div in rx_tr.findall(html):
             if div.find('id="pagination"') < 0:
-                #__print__(dbg.DATA, len(div))
+                #log.DATA(len(div))
                 uu = rx_data.search(div)
                 if uu:
                     (url, title, playing, homepage, genres, listeners, bitrate) = uu.groups()
                     
                     # transform data
@@ -133,17 +133,17 @@
                         "bitrate": int(bitrate or 0),
                         "listeners": int(listeners or 0),
                         "format": "audio/mpeg", # there is no stream info on that, but internet-radio.org.uk doesn't seem very ogg-friendly anyway, so we assume the default here
                     })
                 else:
-                    __print__(dbg.DATA, "Regex couldn't decipher entry:", div)
+                    log.DATA("Regex couldn't decipher entry:", div)
         return r
 
 
     # DOM traversing
     def with_dom(self, html_list):
-        __print__(dbg.PROC, "internet-radio, dom")
+        log.PROC("internet-radio, dom")
         rx_numbers = re.compile("(\d+)")
         r = []
         for html in html_list:
             # the streams are arranged in table rows
             doc = pq(html)

Index: channels/itunes.py
==================================================================
--- channels/itunes.py
+++ channels/itunes.py
@@ -30,11 +30,11 @@
 #
 # In this module only iTunes will be queried for now.
 #
 
 import re
-from config import conf, dbg, __print__
+from config import *
 from channels import *
 import ahttp as http
 
 
 # Surfmusik sharing site
@@ -84,11 +84,11 @@
     # Just copy over stream URLs and station titles
     def update_streams(self, cat):
     
         m3u = http.get(self.base, {"category": cat.lower()})
         if len(m3u) < 256:
-            __print__(dbg.ERR, m3u)
+            log.ERR(m3u)
         
         rx_m3u = re.compile(r"""
             ^File(\d+)\s*=\s*(http://[^\s]+)\s*$\s*
             ^Title\1\s*=\s*([^\r\n]+)\s*$\s*
         """, re.M|re.I|re.X)

Index: channels/jamendo.py
==================================================================
--- channels/jamendo.py
+++ channels/jamendo.py
@@ -36,11 +36,11 @@
 # retrieval is going to become inaccessible soon.)
 
 
 import re
 import ahttp as http
-from config import conf, __print__, dbg
+from config import *
 from channels import *
 import json
 
 
 # jamendo CC music sharing site

Index: channels/live365.py
==================================================================
--- channels/live365.py
+++ channels/live365.py
@@ -24,11 +24,11 @@
 # streamtuner2 modules
 from config import conf
 from uikit import uikit
 import ahttp as http
 from channels import *
-from config import __print__, dbg
+from config import *
 import action
 
 # python modules
 import re
 import xml.dom.minidom

Index: channels/modarchive.py
==================================================================
--- channels/modarchive.py
+++ channels/modarchive.py
@@ -27,11 +27,11 @@
 
 import re
 import ahttp as http
 from config import conf
 from channels import *
-from config import __print__, dbg
+from config import *
 
 
 # The MOD Archive
 #
 # Modarchive actually provides an API
@@ -97,11 +97,11 @@
             .*?    >(?:Rated|Unrated)</a>\s*(\d*)
         """, re.X|re.S)
         
         for uu in rx_mod.findall(html):
             (url, id, fmt, title, file, rating) = uu
-            #__print__( dbg.DATA, uu )
+            #log.DATA( uu )
             entries.append({
                 "genre": cat,
                 "url": url,
                 "id": id,
                 "format": self.mime_fmt(fmt) + "+zip",

Index: channels/punkcast.py
==================================================================
--- channels/punkcast.py
+++ channels/punkcast.py
@@ -26,11 +26,11 @@
 import re
 import ahttp as http
 from config import conf
 import action
 from channels import *
-from config import __print__, dbg
+from config import *
 
 
 # basic.ch broadcast archive
 class punkcast (ChannelPlugin):
 
@@ -79,13 +79,13 @@
         rx_sound = re.compile("""(http://[^"<>]+[.](mp3|ogg|m3u|pls|ram))""")
         html = http.get(row["homepage"])
         
         # look up ANY audio url
         for uu in rx_sound.findall(html):
-            __print__( dbg.DATA, uu )
+            log.DATA( uu )
             (url, fmt) = uu
             action.play(url, self.mime_fmt(fmt), "srv")
             return
         
         # or just open webpage
         action.browser(row["homepage"])
 

Index: channels/radiobrowser.py
==================================================================
--- channels/radiobrowser.py
+++ channels/radiobrowser.py
@@ -33,11 +33,11 @@
 # Also has a neat JSON API, so is quite easy to support.
 
 
 import re
 import json
-from config import conf, dbg, __print__
+from config import *
 from channels import *
 import ahttp as http
 
 
 # API endpoints:

Index: channels/radiotray.py
==================================================================
--- channels/radiotray.py
+++ channels/radiotray.py
@@ -101,11 +101,11 @@
         row = self.parent.row()
         if row:
             # RadioTray doesn't have an addRadio method yet, so just fall back to play the stream URL
             try:
                 group = self.map_group(row.get("genre"))
-                __print__(dbg.PROC, "mapping genre '%s' to RT group '%s'" % (row["genre"], group))
+                log.PROC("mapping genre '%s' to RT group '%s'" % (row["genre"], group))
                 self.radiotray().addRadio(row["title"], row["url"], group)
             except:
                 self.radiotray().playUrl(row["url"])
         pass
 

Index: channels/search.py
==================================================================
--- channels/search.py
+++ channels/search.py
@@ -100,11 +100,11 @@
         self.prepare_search()
         entries = []
         for i,cn in enumerate([self.main.channels[c] for c in self.targets]):
             if cn.has_search:  # "search" in cn.update_streams.func_code.co_varnames:
                 self.main.status("Server searching: " + cn.module)
-                __print__(dbg.PROC, "has_search:", cn.module)
+                log.PROC("has_search:", cn.module)
                 try:
                     add = cn.update_streams(cat=None, search=self.q)
                     for row in add:
                         row["genre"] = cn.meta["title"] + " " + row.get("genre", "")
                     entries += add

Index: channels/shoutcast.py
==================================================================
--- channels/shoutcast.py
+++ channels/shoutcast.py
@@ -24,13 +24,13 @@
 
 
 import ahttp as http
 from json import loads as json_decode
 import re
-from config import conf, __print__, dbg
+from config import *
+from channels import *
 from pq import pq
-from channels import *    # works everywhere but in this plugin(???!)
 import channels
 from compat2and3 import urllib
 
 
 
@@ -65,11 +65,11 @@
         
     # Extracts the category list from www.shoutcast.com,
     # stores a catmap (title => id)
     def update_categories(self):
         html = http.get(self.base_url)
-        #__print__( dbg.DATA, html )
+        #log.DATA( html )
         self.categories = []
         
         # Genre list in sidebar
         """  <li><a id="genre-90" href="/Genre?name=Adult" onclick="loadStationsByGenre('Adult', 90, 89); return false;">Adult</a></li> """
         rx = re.compile(r"loadStationsByGenre\(  '([^']+)' [,\s]* (\d+) [,\s]* (\d+)  \)", re.X)
@@ -92,11 +92,11 @@
 
     # downloads stream list from shoutcast for given category
     def update_streams(self, cat):
 
         if (cat not in self.catmap):
-            __print__( dbg.ERR, "Category not in known map.", cat )
+            log.ERR( "Category not in known map.", cat )
             return []
         id = self.catmap[cat]
 
         # page
         url = "http://www.shoutcast.com/Home/BrowseByGenre"
@@ -104,11 +104,11 @@
         referer = None
         try:
             json = http.get(url, params=params, referer=referer, post=1, ajax=1)
             json = json_decode(json)
         except:
-            __print__(dbg.ERR, "HTTP request or JSON decoding failed. Outdated python/requests perhaps.")
+            log.ERR("HTTP request or JSON decoding failed. Outdated python/requests perhaps.")
             return []
         self.parent.status(0.75)
 
         # remap JSON
         entries = []
@@ -123,8 +123,8 @@
                 "url": "http://yp.shoutcast.com/sbin/tunein-station.pls?id=%s" % e.get("ID", "0"),
                 "homepage": "",
                 "format": "audio/mpeg"
             })
 
-        #__print__(dbg.DATA, entries)
+        #log.DATA(entries)
         return entries
 

Index: channels/somafm.py
==================================================================
--- channels/somafm.py
+++ channels/somafm.py
@@ -24,11 +24,11 @@
 # Note that only 64bit AAC and 128bit MP3 are guaranteed
 # to be available. Most stations offer different bitrates,
 # but not all of them!
 
 
-from config import conf, dbg, __print__
+from config import *
 from channels import *
 import re
 import ahttp
 
 # TuneIn radio directory

Index: channels/surfmusik.py
==================================================================
--- channels/surfmusik.py
+++ channels/surfmusik.py
@@ -32,11 +32,11 @@
 #
 #
 
 import re
 import ahttp as http
-from config import conf, dbg, __print__
+from config import *
 from channels import *
 
 
 
 # Surfmusik sharing site

Index: channels/timer.py
==================================================================
--- channels/timer.py
+++ channels/timer.py
@@ -74,11 +74,11 @@
         
         # prepare spool
         self.sched = kronos.ThreadedScheduler()
         for row in self.streams:
             try: self.queue(row)
-            except Exception as e: __print__(dbg.ERR, "queuing error", e)
+            except Exception as e: log.ERR("queuing error", e)
         self.sched.start()
 
 
     # display GUI for setting timespec
     def edit_timer(self, *w):
@@ -134,11 +134,11 @@
             activity, action_method = "play", self.play
         
         # add
         task = self.sched.add_daytime_task(action_method, activity, days, None, time, kronos.method.threaded, [row], {})
 
-        #__print__( "queue",  act, self.sched, (action_method, act, days, None, time, kronos.method.threaded, [row], {}), task.get_schedule_time(True) )
+        #log.QUEUE(activity, self.sched, (action_method, activity, days, None, time, kronos.method.threaded, [row], {}), task.get_schedule_time(True))
     
     
     
     # converts Mon,Tue,... into numberics 1-7
     def days(self, s):

Index: channels/tunein.py
==================================================================
--- channels/tunein.py
+++ channels/tunein.py
@@ -24,11 +24,11 @@
 #
 
 
 import re
 import json
-from config import conf, dbg, __print__
+from config import *
 from channels import *
 import ahttp as http
 from xml.etree import ElementTree
 
 

Index: channels/ubuntuusers.py
==================================================================
--- channels/ubuntuusers.py
+++ channels/ubuntuusers.py
@@ -20,11 +20,11 @@
 # but stations are grouped by country already.
 #
 
 
 import re
-from config import conf, dbg, __print__
+from config import *
 from channels import *
 import ahttp
 
 
 # UU Wiki radio list

Index: channels/useragentswitcher.py
==================================================================
--- channels/useragentswitcher.py
+++ channels/useragentswitcher.py
@@ -41,9 +41,9 @@
 
     # set new browser string in requests session
     def apply(self):
         ua = self.map.get(conf.useragent.lower(), self.map["default"])
         if ua:
-            __print__(dbg.HTTP, "UserAgentSwitcher:", ua)
+            log.HTTP("UserAgentSwitcher:", ua)
             ahttp.session.headers.update({ "User-Agent": ua })
 
 

Index: channels/xiph.py
==================================================================
--- channels/xiph.py
+++ channels/xiph.py
@@ -103,18 +103,18 @@
       if search:
           params["search"] = search
       
       #-- get data
       data = http.get(self.json_url, params=params)
-      #__print__(dbg.DATA, data)
+      #log.DATA(data)
       
       #-- extract
       l = []
-      __print__( dbg.PROC, "processing api.dir.xiph.org JSON (via api.include-once.org cache)" )
+      log.PROC( "processing api.dir.xiph.org JSON (via api.include-once.org cache)" )
       data = json.loads(data)
       for e in data:
-          #__print__(dbg.DATA, e)
+          #log.DATA(e)
           bitrate = int(e["bitrate"])
           if conf.xiph_min_bitrate and bitrate and bitrate >= int(conf.xiph_min_bitrate):
               if not len(l) or l[-1]["title"] != e["stream_name"]:
                   l.append({
                     "title": e["stream_name"],

Index: channels/youtube.py
==================================================================
--- channels/youtube.py
+++ channels/youtube.py
@@ -241,11 +241,11 @@
         params = dict( list(defaults.items()) + list(params.items())  )
 
         # Retrieve data set
         while pages > 0:
             j = ahttp.get(base_url + method, params=params)
-            #__print__(dbg.DATA, j)
+            #log.DATA(j)
             if j:
                 # json decode
                 data = json.loads(j)
                 
                 # extract items
@@ -302,11 +302,11 @@
         return data
 
 
     # API version 2.0s jsonified XML needs different unpacking:
     def wrap2(self, row):
-        #__print__(dbg.DATA, row)
+        #log.DATA(row)
         return dict(
             genre = row["category"][1]["term"],
             title = row["title"]["$t"],
             playing = row["author"][0]["name"]["$t"],
             format = self.audioformat,

Index: config.py
==================================================================
--- config.py
+++ config.py
@@ -9,11 +9,11 @@
 #    { arg: -e,     type: str,      name: enable[],  description: Add channel plugin.  }
 #    { arg: --gtk3, type: boolean,  name: gtk3,      description: Start with Gtk3 interface. }
 #    { arg: -D,     type: boolean,  name: debug,     description: Enable debug messages on console }
 #    { arg: action, type: str *,    name: action[],  description: CLI interface commands. }
 #    { arg: -x,     type: boolean,  name: exit,      hidden: 1 }
-# version: 2.5
+# version: 2.6
 # priority: core
 #
 # In the main application or module files which need access
 # to a global conf.* object, just import this module as follows:
 #
@@ -20,11 +20,11 @@
 #   from config import *
 #
 # Here conf is already an instantiation of the underlying
 # ConfigDoct class.
 #
-# Also provides the logging function __print__, and basic
+# Also provides the logging wrapper log.TYPE(...), and basic
 # plugin handling code: plugin_meta() and module_list(),
 # and the relative get_data() alias (files from pyzip/path).
 #
 
 from __future__ import print_function
@@ -40,11 +40,11 @@
 import inspect
 import pkgutil
 import argparse
 
 # export symbols
-__all__ = ["conf", "log", "__print__", "dbg", "plugin_meta", "module_list", "get_data", "find_executable"]
+__all__ = ["conf", "log", "plugin_meta", "module_list", "get_data", "find_executable"]
 
 
 #-- create a stub instance of config object
 conf = object()
 
@@ -464,42 +464,21 @@
      )
     """, re.X)
 
 
 
-
-# wrapper for all print statements
-def __print__(*args):
-    if "debug" in conf and conf.debug or args[0] == dbg.ERR:
-        print(" ".join([str(a) for a in args]), file=sys.stderr)
-
-
-# error colorization
-dbg = type('obj', (object,), {
-    "ERR":  r"[ERR]",  # red    ERROR
-    "INIT": r"[INIT]", # red    INIT ERROR
-    "PROC": r"[PROC]", # green  PROCESS
-    "CONF": r"[CONF]", # brown  CONFIG DATA
-    "UI":   r"[UI]",   # blue   USER INTERFACE BEHAVIOUR
-    "HTTP": r"[HTTP]", # magenta HTTP REQUEST
-    "DATA": r"[DATA]", # cyan   DATA
-    "INFO": r"[INFO]", # gray   INFO
-    "STAT": r"[STATE]", # gray  CONFIG STATE
-})
-
-
 # Simplified print wrapper: `log.err(...)`
 class log_printer(object):
 
     # Wrapper
     method = None
     def __getattr__(self, name):
         self.method = name
-        return self.__print__
+        return self.log_print
     
     # Printer
-    def __print__(self, *args, **kwargs):
+    def log_print(self, *args, **kwargs):
         # debug level
         method = self.method.upper()
         if not method == "ERR":
             if "debug" in conf and not conf.debug:
                 return

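With the dbg.* constants and the module-level __print__ gone, call sites go through the exported log object (listed in __all__ above, presumably instantiated as log = log_printer() further down in config.py). A minimal standalone sketch of the mechanism, with the colorization trimmed away:

    from __future__ import print_function
    import sys

    DEBUG = False   # stand-in for conf.debug / the -D switch

    class log_printer(object):
        method = None
        def __getattr__(self, name):
            # log.DATA / log.PROC / log.ERR all resolve to log_print()
            self.method = name
            return self.log_print
        def log_print(self, *args, **kwargs):
            # everything except ERR is silenced unless debugging is enabled
            if self.method.upper() != "ERR" and not DEBUG:
                return
            print("[%s]" % self.method.upper(),
                  " ".join(str(a) for a in args), file=sys.stderr)

    log = log_printer()
    log.ERR("Command not found:", "vlc")    # always printed
    log.DATA("rows =", 123)                 # only with conf.debug / -D
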
Index: contrib/8tracks.py
==================================================================
--- contrib/8tracks.py
+++ contrib/8tracks.py
@@ -16,11 +16,11 @@
 # could work, or checking via dbus/mpris even.
 #
 
 import re
 import json
-from config import conf, dbg, __print__
+from config import *
 from channels import *
 import ahttp as http
 
 
 # Surfmusik sharing site

Index: help/action_saving.page
==================================================================
--- help/action_saving.page
+++ help/action_saving.page
@@ -17,12 +17,12 @@
 
 	<p>The extension of the filename decides on the saved link format.  Per
 	default a .m3u file is created, because that's what most audio players
 	understand.</p>
 
-	<note><p> But you can also save in <file>.pls</file> or <file>.xspf</file>
-	or <file>.asx</file> or <file>.smil</file> format.  Note that the
-	lower right dropdown has no effect.  You have to edit the extension
-	into the filename field.</p></note>
+	<note><p> You can also save in <file>.pls</file> or <file>.xspf</file>
+	or <file>.asx</file> or <file>.smil</file> format.  In current
+	releases the file extension is adapted automatically when you
+	change the filter dropdown (bottom right corner of the file dialog). </p></note>
 
 
 </page>

Index: help/search.page
==================================================================
--- help/search.page
+++ help/search.page
@@ -31,18 +31,18 @@
 	<media src="img/search.png" type="image" mime="image/png" />
 
         <p>It either will search in all channels, or just the last active
         channel/service.</p>
 
-	<p>Then there are two search methods.  You mostly want to use
+	<p>Then there are two search variants.  You mostly want to use
         the live <gui>Server search</gui>.  It passes your search terms to
         the actual directory services, and loads the most recent data into a
         result list. This might take a few seconds. And it's not implemented
-        for all channel plugins however.</p>
+        for all channel plugins. (Some websites/APIs don't have a search.)</p>
 
-	<p>With <gui>Cache find</gui> would just look up entries in your
+	<p><gui>Cache find</gui>, by contrast, only looks up entries in your
         already downloaded channel/genre lists.  This is sufficient when
-        you're just looking for something previously seen/listended to.</p>
+        you're just looking for something previously seen/listened to.</p>
 
 	</section>
 
 </page>

Index: st2.py
==================================================================
--- st2.py
+++ st2.py
@@ -121,11 +121,11 @@
 
         # display current open channel/notebook tab
         gui_startup(18/20.0)
         self.current_channel = self.current_channel_gtk()
         try: self.channel().first_show()
-        except: __print__(dbg.INIT, "main.__init__: current_channel.first_show() initialization error")
+        except: log.INIT("main.__init__: current_channel.first_show() initialization error")
 
   
         # bind gtk/glade event names to functions
         gui_startup(19.75/20.0)
         self.connect_signals({
@@ -230,11 +230,11 @@
     
         
     # Notebook tab has been clicked (receives numeric page_num), but *NOT* yet changed (visually).
     def channel_switch(self, notebook, page, page_num=0, *args):
         self.current_channel = notebook.get_menu_label_text(notebook.get_nth_page(page_num))
-        __print__(dbg.UI, "main.channel_switch() :=", self.current_channel)
+        log.UI("main.channel_switch() :=", self.current_channel)
         self.update_title()
         # if first selected, load current category
         # (run in thread, to make it look speedy on first startup)
         self.thread( self.channel().first_show )
 
@@ -277,17 +277,17 @@
         else: self.status("No homepage URL present.")
 
     # Browse to channel homepage (double click on notebook tab)
     def on_homepage_channel_clicked(self, widget, event=2):
         if event == 2 or event.type == gtk.gdk._2BUTTON_PRESS:
-            __print__(dbg.UI, "dblclick")
+            log.UI("dblclick")
             url = self.channel().meta.get("url", "https://duckduckgo.com/?q=" + self.channel().module)
             action.browser(url)
 
     # Reload stream list in current channel-category
     def on_reload_clicked(self, widget=None, reload=1):
-        __print__(dbg.UI, "on_reload_clicked()", "reload=", reload, "current_channel=", self.current_channel, "c=", self.channels[self.current_channel], "cat=", self.channel().current)
+        log.UI("on_reload_clicked()", "reload=", reload, "current_channel=", self.current_channel, "c=", self.channels[self.current_channel], "cat=", self.channel().current)
         category = self.channel().current
         self.thread(
                        #@TODO: should get a wrapper, for HTTP errors, and optionalize bookamrks
             lambda: (  self.channel().load(category,reload), reload and self.bookmarks.heuristic_update(self.current_channel,category)  )
         )
@@ -300,11 +300,11 @@
 
 
     # Click in category list
     def on_category_clicked(self, widget, event, *more):
         category = self.channel().currentcat()
-        __print__(dbg.UI, "on_category_clicked", category, self.current_channel)
+        log.UI("on_category_clicked", category, self.current_channel)
         self.on_reload_clicked(None, reload=0)
         pass
 
     # Add current selection to bookmark store
     def bookmark(self, widget):
@@ -389,11 +389,11 @@
             if not name in conf.plugins:
                 conf.add_plugin_defaults(plugin_meta(module=name), name)
             
             # skip module if disabled
             if conf.plugins.get(name, 1) == False:
-                __print__(dbg.STAT, "disabled plugin:", name)
+                log.STAT("disabled plugin:", name)
                 continue
             # or if it's a built-in (already imported)
             elif name in self.features or name in self.channels:
                 continue
             
@@ -410,11 +410,11 @@
                 # or .features{} for other plugin types
                 else:
                     self.features[name] = plugin_obj
                 
             except Exception as e:
-                __print__(dbg.INIT, "load_plugin_channels: error initializing:", name, ", exception:")
+                log.INIT("load_plugin_channels: error initializing:", name, ", exception:")
                 traceback.print_exc()
 
     # load application state (widget sizes, selections, etc.)
     def init_app_state(self):
 
@@ -496,11 +496,11 @@
             del conf.firstrun
 
         # run
         gtk.main()
         [callback() for callback in main.hooks["quit"]]
-        __print__(dbg.PROC, r" gtk_main_quit ")
+        log.PROC(r" gtk_main_quit ")
         
     # invoke command-line interface
     else:
         import cli
         cli.StreamTunerCLI(conf.args.action)

Index: uikit.py
==================================================================
--- uikit.py
+++ uikit.py
@@ -44,12 +44,12 @@
     pygtk.enable() 
     pygtk.enable_gtk(version='3.0')
     from gi.repository import Gtk as gtk
     from gi.repository import GObject as gobject
     from gi.repository import GdkPixbuf
-    __print__(dbg.STAT, gtk)
-    __print__(dbg.STAT, gobject)
+    log.STAT(gtk)
+    log.STAT(gobject)
 else:
     import pygtk
     import gtk
     import gobject
     GdkPixbuf = gtk.gdk
@@ -129,11 +129,11 @@
                     # apply attributes
                     for attr,val in list(cell[3].items()):
                         col.add_attribute(rend, attr, val)
                     # next
                     datapos += 1
-                    #__print__(dbg.INFO, cell, len(cell))
+                    #log.INFO(cell, len(cell))
 
                 # add column to treeview
                 widget.append_column(col)
             # finalize widget
             widget.set_search_column(5)   #??
@@ -153,12 +153,12 @@
                     for var in xrange(2, len(desc)):
                         vartypes.append(desc[var][1])  # content types
                         rowmap.append(desc[var][0])    # dict{} column keys in entries[] list
             # create gtk array storage
             ls = gtk.ListStore(*vartypes)   # could be a TreeStore, too
-            #__print__(dbg.UI, vartypes, len(vartypes))
-            #__print__(dbg.DATA, rowmap, len(rowmap))
+            #log.UI(vartypes, len(vartypes))
+            #log.DATA(rowmap, len(rowmap))
  
             # prepare for missing values, and special variable types
             defaults = {
                 str: "",
                 unicode: "",
@@ -194,11 +194,11 @@
                 except:
                     # brute-force typecast
                     ls.append( [va  if ty==gtk.gdk.Pixbuf  else ty(va)   for va,ty in zip(row,vartypes)]  )
 
             #if entries:
-                 #__print__("→", row, len(row))
+            #     log.ROWS(row, len(row))
             
             # apply array to widget
             widget.set_model(ls)
             return ls
             
@@ -215,11 +215,11 @@
     @staticmethod     
     def tree(widget, entries, title="category", icon=gtk.STOCK_DIRECTORY):
 
         # list types
         ls = gtk.TreeStore(str, str)
-        #__print__(dbg.DATA, ".tree", entries)
+        #log.DATA(".tree", entries)
 
         # add entries
         for entry in entries:
             if isinstance(entry, (str,unicode)):
                 main = ls.append(None, [str(entry), icon])