Internet radio browser GUI for music/video streams from various directory services.

Branch: streamtuner2


Check-in [b9af78503d]

Overview
Comment: Fully replace ahttp.ajax with ahttp.get() wrapper
Downloads: Tarball | ZIP archive | SQL archive
Timelines: family | ancestors | descendants | both | trunk
Files: files | file ages | folders
SHA1: b9af78503df83d4efe374bbe0893506d5d6fdb63
User & Date: mario on 2014-04-27 22:48:23
Other Links: manifest | tags
Context
2014-04-28 00:19 - Adapted DMOZ retrieval to changed HTML (check-in: a44d5a6e74, user: mario, tags: trunk)
2014-04-27 23:33 - Experimental Xiph.org JSON API (doesn't work) (check-in: a1c4d0960a, user: mario, tags: xiphjson)
2014-04-27 22:48 - Fully replace ahttp.ajax with ahttp.get() wrapper (check-in: b9af78503d, user: mario, tags: trunk)
2014-04-27 22:19 - Python3 support back into trunk (check-in: 9ecea4fb26, user: mario, tags: trunk)
Changes

Modified ahttp.py from [4f43fe032b] to [cfbb6a842e].

Old version (lines 8-103):

#
#  Provides a http GET method with gtk.statusbar() callback.
#  And a function to add trailings slashes on http URLs.
#
#


from compat2and3 import urllib2, urlencode, urlparse, cookielib, StringIO, xrange, PY3
from gzip import GzipFile
from config import conf, __print__, dbg
import requests
import copy



#-- chains to progress meter and status bar in main window
feedback = None

# sets either text or percentage, so may take two parameters



def progress_feedback(*args):

  # use reset values if none given
  if not args:
     args = ["", 1.0]

  # send to main win
  if feedback:
    try: [feedback(d) for d in args]
    except: pass




# default HTTP headers for AJAX/POST request
default_headers = {
    "User-Agent": "streamtuner2/2.1 (X11; U; Linux AMD64; en; rv:1.5.0.1) like WinAmp/2.1 but not like Googlebot/2.1", #"Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.2.6) Gecko/20100628 Ubuntu/10.04 (lucid) Firefox/3.6.6",
    "Accept": "*/*;q=0.5, audio/*, url/*",
    "Accept-Language": "en-US,en,de,es,fr,it,*;q=0.1",
    "Accept-Encoding": "gzip,deflate",
    "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.1",
    "Keep-Alive": "115",
    "Connection": "keep-alive",
    "Pragma": "no-cache",
    "Cache-Control": "no-cache",
}



#-- GET



def get(url, params={}, referer="", post=0, ajax=0, binary=0):
    __print__( dbg.HTTP, "GET", url)

    # statusbar info
    progress_feedback(url, 0.1)
    
    # combine headers
    headers = copy.copy(default_headers)
    if ajax:
        headers["X-Requested-With"] = "XMLHttpRequest"
    if referer:
        headers["Referer"] = (referer if referer else url)
    
    # read
    if post:
        __print__("POST")
        r = requests.post(url, params=params, headers=headers)
    else:    
        __print__("GET")
        r = requests.get(url, params=params, headers=headers)
        
    # result
    progress_feedback(0.9)
    content = (r.content if binary else r.text)
    
    # finish, clean statusbar
    progress_feedback()
    __print__( dbg.INFO, "Content-Length", len(content) )
    return content




# simulate ajax calls
def ajax(url, params, referer="", binary=0):
    get(url, params, referer, binary, ajax=1)



#-- fix invalid URLs
def fix_url(url):
    if url is None:
        url = ""
    if len(url):
        # remove whitespace
        url = url.strip()
        # add scheme


New version (lines 8-99):

#
#  Provides a http GET method with gtk.statusbar() callback.
#  And a function to add trailings slashes on http URLs.
#
#




from config import conf, __print__, dbg
import requests
import copy



#-- hooks to progress meter and status bar in main window
feedback = None

# Sets either text or percentage of main windows' status bar.
#
# Can either take a float parameter (e.g. 0.99 for % indicator)
# or text message. Alternatively two parameters to update both.
def progress_feedback(*args):

  # use reset values if none given
  if not args:
     args = ["", 1.0]

  # send to main win
  if feedback:
    try: [feedback(d) for d in args]
    except: pass




# default HTTP headers for requests
default_headers = {
    "User-Agent": "streamtuner2/2.1 (X11; U; Linux AMD64; en; rv:1.5.0.1) like WinAmp/2.1 but not like Googlebot/2.1", #"Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.2.6) Gecko/20100628 Ubuntu/10.04 (lucid) Firefox/3.6.6",
    "Accept": "*/*;q=0.5, audio/*, url/*",
    "Accept-Language": "en-US,en,de,es,fr,it,*;q=0.1",
    "Accept-Encoding": "gzip,deflate",
    "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.1",
    "Keep-Alive": "115",
    "Connection": "keep-alive",
    "Pragma": "no-cache",
    "Cache-Control": "no-cache",
}



#-- Retrieve data via HTTP
#
#  Well, it says "get", but it actually does POST and AJAXish GET requests too.
#
def get(url, params={}, referer="", post=0, ajax=0, binary=0, feedback=None):
    __print__( dbg.HTTP, "GET", url)

    # statusbar info
    progress_feedback(url, 0.1)
    
    # combine headers
    headers = copy.copy(default_headers)
    if ajax:
        headers["X-Requested-With"] = "XMLHttpRequest"
    if referer:
        headers["Referer"] = (referer if referer else url)
    
    # read
    if post:

        r = requests.post(url, params=params, headers=headers)
    else:    

        r = requests.get(url, params=params, headers=headers)
        
    # result
    progress_feedback(0.9)
    content = (r.content if binary else r.text)
    
    # finish, clean statusbar
    progress_feedback()
    __print__( dbg.INFO, "Content-Length", len(content) )
    return content







#-- Append missing trailing slash to URLs



def fix_url(url):
    if url is None:
        url = ""
    if len(url):
        # remove whitespace
        url = url.strip()
        # add scheme

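The file sections below are the call-site conversions. As a quick orientation, here is a minimal usage sketch of the new requests-based get() wrapper; the URLs and values are placeholders, not code from this check-in (the channel modules import the module as http, favicon.py as ahttp):

import ahttp

# plain GET; returns the decoded text body (requests' r.text)
html = ahttp.get("http://example.org/stations")

# AJAX-style GET: adds the X-Requested-With: XMLHttpRequest header,
# replacing calls to the former ahttp.ajax() helper
data = ahttp.get("http://example.org/api/list", params={"genre": "jazz"}, ajax=1)

# POST: note the wrapper hands params to requests.post() as params=,
# so they end up in the query string rather than the request body
ahttp.get("http://example.org/submit", params={"name": "test"}, post=1)

# binary download; returns raw bytes (requests' r.content) instead of text
ico = ahttp.get("http://example.org/favicon.ico", binary=1)
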
Modified channels/musicgoal.py from [faf1e3671e] to [487d9e3db3].

Old version (lines 66-80):

            elif cat in self.radio:
                grp = "radio"
                url = self.api_radio % cat.lower().replace(" ","").replace("&","")
            else:
                return []
                
            # retrieve API data
            data = http.ajax(url, None)
            data = json.loads(data)
                
            # tranform datasets            
            if grp == "podcast":
                return [{
                    "genre": cat,
                    "title": row["titel"],

New version (lines 66-80):

            elif cat in self.radio:
                grp = "radio"
                url = self.api_radio % cat.lower().replace(" ","").replace("&","")
            else:
                return []
                
            # retrieve API data
            data = http.get(url, params=None, ajax=1)
            data = json.loads(data)
                
            # tranform datasets            
            if grp == "podcast":
                return [{
                    "genre": cat,
                    "title": row["titel"],

Modified channels/myoggradio.py from [6c4dac0144] to [68cf24bd7d].

Old version (lines 153-185):

                "typ": e["format"][6:],
                "eintragen": "eintragen", # form
            }

            # just push data in, like the form does
            if form:
                self.login()
                http.ajax(self.api + "c_neu.jsp", submit)

            # use JSON interface
            else:
                http.ajax(self.api + "commonadd.json?" + urllib.urlencode(submit))
    
            
    # authenticate against MyOggRadio
    def login(self):
        login = self.user_pw()    
        if login:
            data = dict(zip(["benutzer", "passwort"], login))
            http.ajax(self.api + "c_login.jsp", data)
            # let's hope the JSESSIONID cookie is kept


    # returns login (user,pw)
    def user_pw(self):
        if conf.myoggradio_login != "user:password":
            return conf.myoggradio_login.split(":")
        else: pass
        


New version (lines 153-185):

                "typ": e["format"][6:],
                "eintragen": "eintragen", # form
            }

            # just push data in, like the form does
            if form:
                self.login()
                http.get(self.api + "c_neu.jsp", params=submit, ajax=1, post=1)

            # use JSON interface
            else:
                http.get(self.api + "commonadd.json", params=submit, ajax=1)
    
            
    # authenticate against MyOggRadio
    def login(self):
        login = self.user_pw()    
        if login:
            data = dict(zip(["benutzer", "passwort"], login))
            http.get(self.api + "c_login.jsp", params=data, ajax=1)
            # let's hope the JSESSIONID cookie is kept


    # returns login (user,pw)
    def user_pw(self):
        if conf.myoggradio_login != "user:password":
            return conf.myoggradio_login.split(":")
        else: pass
        




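One detail worth noting in this hunk: the JSON endpoint no longer needs a hand-assembled query string, because requests URL-encodes the params dict itself. A small illustrative sketch of that equivalence, using a placeholder host rather than the module's real self.api value:

import requests

submit = {"url": "http://example.org/stream.ogg", "typ": "ogg"}  # illustrative values

# what urllib.urlencode() plus string concatenation produced before is now
# built by requests from params=; a PreparedRequest shows the resulting URL
req = requests.Request("GET", "http://example.org/commonadd.json", params=submit).prepare()
print(req.url)  # http://example.org/commonadd.json?url=http%3A%2F%2Fexample.org%2Fstream.ogg&typ=ogg
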
Modified channels/xiph.py from [e14b45bb99] to [04e8c2b58d].

Old version (lines 117-134):

        def update_streams(self, cat, search=""):

            # there is actually just a single category to download,
            # all else are virtual
            if (cat == "all"):
            
                #-- get data
                yp = http.get(self.base_url + self.yp, 1<<22, feedback=self.parent.status)
                
                #-- extract
                l = []

                for entry in xml.dom.minidom.parseString(yp).getElementsByTagName("entry"):
                    bitrate = self.bitrate(self.x(entry, "bitrate"))
                    if conf.xiph_min_bitrate and bitrate and bitrate >= int(conf.xiph_min_bitrate):
                      l.append({
                        "title": str(self.x(entry, "server_name")),
                        "url": str(self.x(entry, "listen_url")),
                        "format": self.mime_fmt(str(self.x(entry, "server_type"))[6:]),

New version (lines 117-135):

        def update_streams(self, cat, search=""):

            # there is actually just a single category to download,
            # all else are virtual
            if (cat == "all"):
            
                #-- get data
                yp = http.get(self.base_url + self.yp)
                
                #-- extract
                l = []
                __print__( dbg.DATA, "xml.dom.minidom parses yp.xml" )
                for entry in xml.dom.minidom.parseString(yp).getElementsByTagName("entry"):
                    bitrate = self.bitrate(self.x(entry, "bitrate"))
                    if conf.xiph_min_bitrate and bitrate and bitrate >= int(conf.xiph_min_bitrate):
                      l.append({
                        "title": str(self.x(entry, "server_name")),
                        "url": str(self.x(entry, "listen_url")),
                        "format": self.mime_fmt(str(self.x(entry, "server_type"))[6:]),

Modified favicon.py from [983556d34c] to [b3ed705ebd].

Old version (lines 84-98):


        # extract first title parts
        title = rx_t.search(row["title"])
        if title:
            title = title.group(0).replace(" ", "%20")
            
            # do a google search
            html = ahttp.ajax("http://www.google.de/search?hl=de&q="+title, None)
            
            # find first URL hit
            url = rx_u.search(html)
            if url:
                row["homepage"] = ahttp.fix_url(url.group(1))
    pass
#-----------------

New version (lines 84-98):


        # extract first title parts
        title = rx_t.search(row["title"])
        if title:
            title = title.group(0).replace(" ", "%20")
            
            # do a google search
            html = ahttp.get("http://www.google.de/search?hl=de&q="+title, params={}, ajax=1)
            
            # find first URL hit
            url = rx_u.search(html)
            if url:
                row["homepage"] = ahttp.fix_url(url.group(1))
    pass
#-----------------
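
As with the other call sites, the search query is still spliced into the URL by hand, with spaces pre-replaced by %20. A hedged alternative, not part of this check-in, would let the wrapper's params handle the encoding of the raw title:

# hypothetical variant; relies on get() forwarding params to requests
html = ahttp.get("http://www.google.de/search", params={"hl": "de", "q": title}, ajax=1)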