@@ -62,71 +58,95 @@
        # superclass
        ChannelPlugin.__init__(self, parent)

    # read category tree from /listen/browse.live
    def update_categories(self):
        self.categories = []
        return
        # fetch page (unreachable while the early return above is in place)
        html = http.get("http://www.live365.com/index.live", feedback=self.parent.status)
        rx_genre = re.compile(r"""
            href=['"]/genres/([\w\d%+]+)['"][^>]*>
            ( (?:<nobr>)? )
            ( \w[-\w\ /'.&]+ )
            ( (?:</a>)? )
        """, re.X|re.S)
        # four groups per link: the genre tag from the href, an optional <nobr>
        # (treated below as the subgenre marker), the visible genre title, and
        # an optional closing </a> (treated as the main genre marker)
        # collect
        last = []
        for uu in rx_genre.findall(html):
            (link, sub, title, main) = uu
            # main
            if main and not sub:
                self.categories.append(title)
                self.categories.append(last)
                last = []
            # subcat
            else:
                last.append(title)
        # don't forget last entries
        self.categories.append(last)
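
    # The intended result is streamtuner2's usual nested category list: main
    # genres as plain strings, each followed by a list of its subgenres, roughly
    # like this (genre names invented for illustration):
    #
    #   ["Alternative", ["Punk", "Industrial"], "Blues", ["Acoustic Blues"], ...]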

    # extract stream infos
    def update_streams(self, cat, search=""):

        # genre listing or search URL
        if (not search):
            url = "http://www.live365.com/cgi-bin/directory.cgi?first=1&rows=200&mode=2&genre=" + self.cat2tag(cat)
        else:
            url = "http://www.live365.com/cgi-bin/directory.cgi?site=..&searchdesc=" + urllib.quote(search) + "&searchgenre=" + self.cat2tag(cat) + "&x=0&y=0"
        html = http.get(url, feedback=self.parent.status)
        # only one page needs downloading, because live365 never returns more than 200 results
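        # For instance, with a hypothetical category "Rock" (and assuming cat2tag()
        # passes the name through unchanged), the genre URL would come out as:
        #   http://www.live365.com/cgi-bin/directory.cgi?first=1&rows=200&mode=2&genre=Rock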
        # extract the station objects from the embedded JS calls (terse stn.set format)
        rx = re.compile(r"""
            new\s+top\.Station;
            \s+ stn.set\("stationName", \s+ "(\w+)"\);
            \s+ stn.set\("title", \s+ "([^"]+)"\);
            \s+ stn.set\("id", \s+ "(\d+)"\);
            \s+ stn.set\("listenerAccess", \s+ "(\w+)"\);
            \s+ stn.set\("status", \s+ "(\w+)"\);
            \s+ stn.set\("serverMode", \s+ "(\w+)"\);
            \s+ stn.set\("rating", \s+ "(\d+)"\);
            \s+ stn.set\("ratingCount", \s+ "(\d+)"\);
            \s+ stn.set\("tlh", \s+ "(\d+)"\);
            \s+ stn.set\("imgUrl", \s+ "([^"]+)"\);
            \s+ stn.set\("location", \s+ "([^"]+)"\);
        """, re.X|re.I|re.S|re.M)
        # sample match:
        #('jtava', 'ANRLIVE.NET', '293643', 'PUBLIC', 'OK', 'OR', '298', '31',
        #'98027', 'http://www.live365.com/userdata/37/15/1371537/stationlogo80x45.jpg', 'n/a')
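        # capture group order, i.e. the row indices used below:
        #   0 stationName, 1 title, 2 id, 3 listenerAccess, 4 status, 5 serverMode,
        #   6 rating, 7 ratingCount, 8 tlh, 9 imgUrl, 10 location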

        # append entries to result list
        __print__( html )
        ls = []
        for row in rx.findall(html):
            __print__( row )
            ls.append({
                "launch_id": row[0],
                "sofo": row[3],  # subscribe-or-fuck-off status flags
                "state": ("" if row[4]=="OK" else gtk.STOCK_STOP),
                "title": entity_decode(row[1]),
                "station_id": row[2],
                "homepage": "http://www.live365.com/stations/" + row[0],  # station page, derived from the station name
                "genre": cat,
                "description": entity_decode(row[10]),
                "rating": int(row[6]),
                "rating_count": int(row[7]),
                "listeners": int(row[8]),  # the "tlh" field
                "max": 0,
                "bitrate": 128,  # not exposed in the JS data, assume a default
                "img_": row[9],
                "playing": "",
                #"playing": row[10],
                # the mini.cgi call apparently returns the station's playable URL as XML
                # (hence the application/xml format)
                "url": "http://www.live365.com/cgi-bin/mini.cgi?version=3&templateid=xml&from=web&site=web&caller=&tag=web&station_name="+row[0]+"&_=1388870321828",
                "format": "application/xml",
                # "deleted": row[4] != "OK",
            })
        return ls

    # faster if we do it in _update() prematurely
    #def prepare(self, ls):
    #    GenericChannel.prepare(ls)
    #    for row in ls:
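
# Illustration only, not part of the plugin: a minimal, self-contained check of the
# "terse format" expression used above, run against an invented directory.cgi JS
# snippet. The sample text and station values below are made up for demonstration
# (modelled on the commented example tuple); real live365 output may differ.
if __name__ == "__main__":
    import re
    rx_demo = re.compile(r"""
        new\s+top\.Station;
        \s+ stn.set\("stationName", \s+ "(\w+)"\);
        \s+ stn.set\("title", \s+ "([^"]+)"\);
        \s+ stn.set\("id", \s+ "(\d+)"\);
        \s+ stn.set\("listenerAccess", \s+ "(\w+)"\);
        \s+ stn.set\("status", \s+ "(\w+)"\);
        \s+ stn.set\("serverMode", \s+ "(\w+)"\);
        \s+ stn.set\("rating", \s+ "(\d+)"\);
        \s+ stn.set\("ratingCount", \s+ "(\d+)"\);
        \s+ stn.set\("tlh", \s+ "(\d+)"\);
        \s+ stn.set\("imgUrl", \s+ "([^"]+)"\);
        \s+ stn.set\("location", \s+ "([^"]+)"\);
    """, re.X|re.I|re.S|re.M)
    sample_js = """
        stn = new top.Station;
        stn.set("stationName", "demostation");
        stn.set("title", "Demo Radio");
        stn.set("id", "123456");
        stn.set("listenerAccess", "PUBLIC");
        stn.set("status", "OK");
        stn.set("serverMode", "OR");
        stn.set("rating", "298");
        stn.set("ratingCount", "31");
        stn.set("tlh", "98027");
        stn.set("imgUrl", "http://www.live365.com/userdata/demo/logo80x45.jpg");
        stn.set("location", "n/a");
    """
    for row in rx_demo.findall(sample_js):
        # prints one 11-tuple in the stationName..location order listed above
        print(row)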