Internet radio browser GUI for music/video streams from various directory services.

Branch: streamtuner2


Check-in [95166f7d96]

Overview
Comment: New Freebase topic IDs, fix progressbar for multi-page collecting.
SHA1: 95166f7d96612acd1e242b633b4958432dd3e8e5
User & Date: mario on 2014-05-26 14:06:30
Context
2014-05-26
15:32  Move status.progressbar init and cleanup into GenericChannel.load()  check-in: 8c1da4e0f7 user: mario tags: trunk
14:06  New Freebase topic IDs, fix progressbar for multi-page collecting.  check-in: 95166f7d96 user: mario tags: trunk
14:05  Fix ProgressBar for Py2, don't use default steps in HTTP retrieval.  check-in: c5251618b3 user: mario tags: trunk
Changes

Modified channels/youtube.py from [c4f0c5f1c6] to [2213612999].

Before (lines 83-99):
                "fields": "pageInfo,nextPageToken,items(id,snippet(title,thumbnails/default/url,channelTitle))",
            }
        ]
    }

    categories = [
        "mostPopular",
        ["Music", "Comedy", "Movies", "Shows", "Short Movies", "Trailers", "Film & Animation", "Entertainment", "News & Politics", "Sci-Fi/Fantasy"],
        "topics",
        ["music", "pop", "music video"],

        "my channels",
        ["Key of Awesome", "Pentatonix"]
    ] 

    # plugin settings 
    config = [
       {

After (lines 83-100):
                "fields": "pageInfo,nextPageToken,items(id,snippet(title,thumbnails/default/url,channelTitle))",
            }
        ]
    }

    categories = [
        "mostPopular",
        ["Music", "Comedy", "Movies", "Shows", "Short Movies", "Trailers", "Film & Animation", "Entertainment", "News & Politics"],
        "topics",
        ["Pop", "Billboard charts", "Rock", "Hip Hop", "Classical", "Soundtrack", "Ambient",
         "Jazz", "Blues", "Soul", "Country", "Disco", "Dance", "House", "Trance", "Techno", "Electronica"],
        "my channels",
        ["Key of Awesome", "Pentatonix"]
    ] 

    # plugin settings 
    config = [
       {
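
The nested categories list above pairs each parent label ("mostPopular", "topics", "my channels") with the list of subcategories that follows it; update_categories() in the next hunk relies on exactly this layout when it swaps the list after "my channels" for the user's configured channel names. A minimal sketch of that convention; the sublist_for() helper is hypothetical and not part of the plugin:

# Sketch only: how the label/sub-list pairing in `categories` can be read.
# `sublist_for` is a hypothetical helper, not part of channels/youtube.py;
# it mirrors what update_categories() does with "my channels" below.
categories = [
    "mostPopular",
    ["Music", "Comedy"],                       # abbreviated
    "topics",
    ["Pop", "Rock", "Jazz"],                   # abbreviated
    "my channels",
    ["Key of Awesome", "Pentatonix"],
]

def sublist_for(categories, label):
    """Return the sub-list that directly follows a parent label."""
    i = categories.index(label) + 1
    if i < len(categories) and isinstance(categories[i], list):
        return categories[i]
    return []

print(sublist_for(categories, "my channels"))  # ['Key of Awesome', 'Pentatonix']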

Before (lines 145-191):
        "Thriller": 41,
        "Shorts": 42,
        "Shows": 43,
        "Trailers": 44,
    }
    # Freebase topics
    topic_id = {
        "music": "/m/0kpv0g",
        "pop": "/m/064t9",
        "music video": "/m/05k5h7g",
    }


    # just a static list for now
    def update_categories(self):
        i = self.categories.index("my channels") + 1
        self.categories[i] = [ title.strip() for title in conf.youtube_channels.split(",") ]


    # retrieve and parse
    def update_streams(self, cat, force=0, search=None):

        entries = []
        channels = self.categories[self.categories.index("my channels") + 1]

        
        # Most Popular
        if cat == "mostPopular":
            #for row in self.api("feeds/api/standardfeeds/%s/most_popular"%conf.youtube_region, ver=2):
            #    entries.append(self.wrap2(row))
            for row in self.api("videos", chart="mostPopular", regionCode=conf.youtube_region):
                entries.append( self.wrap3(row, {"genre": "mostPopular"}) )

        # Categories
        elif cat in self.videocat_id:
            for row in self.api("search", chart="mostPopular", videoCategoryId=self.videocat_id[cat], order="date", type="video"):
                entries.append( self.wrap3(row, {"genre": cat}) )

        # Topics
        elif cat in self.topic_id:
            for row in self.api("search", order="date", regionCode=conf.youtube_region, topicId=self.topic_id[cat], type="video"):
                entries.append( self.wrap3(row, {"genre": cat}) )

        # My Channels
        # - searches channel id for given title
        # - iterates over playlist
        # - then over playlistitems to get videos
        elif cat in channels:

After (lines 146-207):
        "Thriller": 41,
        "Shorts": 42,
        "Shows": 43,
        "Trailers": 44,
    }
    # Freebase topics
    topic_id = {

        "pop": "/m/064t9",
        "billboard charts": "/m/04qf57",
        "rock": "/m/06by7",
        "dance": "/m/0ggx5q",
        "classical": "/m/0ggq0m",
        "hip hop": "/m/0glt670",
        "soundtrack": "/m/0l14gg",
        "ambient": "/m/0fd3y",
        "electronica": "/m/0m0jc",
        "jazz": "/m/03_d0",
        "techno": "/m/07gxw",
        "disco": "/m/026z9",
        "country": "/m/01lyv",
        "blues": "/m/0155w",
        "soul": "/m/0gywn",
        "trance": "/m/07lnk",
        "house": "/m/03mb9",
    }


    # just a static list for now
    def update_categories(self):
        i = self.categories.index("my channels") + 1
        self.categories[i] = [ title.strip() for title in conf.youtube_channels.split(",") ]


    # retrieve and parse
    def update_streams(self, cat, force=0, search=None):

        entries = []
        channels = self.categories[self.categories.index("my channels") + 1]
        self.parent.status(-0.1)
        
        # Most Popular
        if cat == "mostPopular":
            #for row in self.api("feeds/api/standardfeeds/%s/most_popular"%conf.youtube_region, ver=2):
            #    entries.append(self.wrap2(row))
            for row in self.api("videos", chart="mostPopular", regionCode=conf.youtube_region):
                entries.append( self.wrap3(row, {"genre": "mostPopular"}) )

        # Categories
        elif cat in self.videocat_id:
            for row in self.api("search", chart="mostPopular", videoCategoryId=self.videocat_id[cat], order="date", type="video"):
                entries.append( self.wrap3(row, {"genre": cat}) )

        # Topics
        elif cat.lower() in self.topic_id:
            for row in self.api("search", order="date", regionCode=conf.youtube_region, topicId=self.topic_id[cat.lower()], type="video"):
                entries.append( self.wrap3(row, {"genre": cat}) )

        # My Channels
        # - searches channel id for given title
        # - iterates over playlist
        # - then over playlistitems to get videos
        elif cat in channels:
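
Two changes in this hunk carry the check-in comment: update_streams() now calls self.parent.status(-0.1) before any network round-trips (the meaning of the negative value is defined in the main window code, not in this file), and the Topics branch lower-cases the category label so the capitalized genre names added to `categories` ("Pop", "Billboard charts", ...) find their Freebase IDs in the lower-cased `topic_id` keys. The topic ID is then handed to the YouTube Data API v3 search endpoint. Roughly the request such a call boils down to; the base URL, the "part" default and the API key are assumptions here, since the plugin's `service` table with the real defaults sits outside this hunk:

# Sketch only: approximately what
#   self.api("search", order="date", regionCode=conf.youtube_region,
#            topicId=self.topic_id["pop"], type="video")
# requests. Base URL, "part" and "key" are assumptions; the plugin's real
# defaults live in self.service[3], which is not shown in this diff.
try:
    from urllib.parse import urlencode   # Python 3
except ImportError:
    from urllib import urlencode         # Python 2
params = {
    "part": "snippet",
    "type": "video",
    "order": "date",
    "regionCode": "GB",                  # stand-in for conf.youtube_region
    "topicId": "/m/064t9",               # "pop" from the topic_id map above
    "maxResults": 50,
    "key": "YOUR_API_KEY",               # placeholder
}
url = "https://www.googleapis.com/youtube/v3/search?" + urlencode(params)
# The JSON answer contains "items" plus a "nextPageToken" while more pages
# exist, which is what api() further down pages through.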

Before (lines 208-221):
                entries.append( self.wrap3(row, {"genre": ""}) )
        
        # empty entries
        else:
            entries = [dict(title="Placeholder for subcategories", genre="./.", playing="./.", url="http://youtube.com/")]
 
        # done    

        return entries
        

    
    # Search for channel name:
    def channel_id(self, title):
        id = self.channel2id.get(title)

After (lines 224-238):
                entries.append( self.wrap3(row, {"genre": ""}) )
        
        # empty entries
        else:
            entries = [dict(title="Placeholder for subcategories", genre="./.", playing="./.", url="http://youtube.com/")]
 
        # done    
        self.parent.status(1.0)
        return entries
        

    
    # Search for channel name:
    def channel_id(self, title):
        id = self.channel2id.get(title)
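
channel_id() only shows its first line in this diff, a lookup in the self.channel2id cache; the remainder is unchanged and therefore elided. Going by the "My Channels" comments above (resolve the title to a channel id, then walk playlists and playlist items), one plausible way the lookup could be completed with a type="channel" search, purely as an illustration and not the plugin's actual implementation:

    # Sketch only -- the real continuation of channel_id() is not part of
    # this hunk. A title -> channelId resolution might look like this; the
    # cache dict self.channel2id is the one referenced above.
    def channel_id(self, title):
        id = self.channel2id.get(title)
        if not id:
            for row in self.api("search", q=title, type="channel"):
                id = row.get("id", {}).get("channelId")
                if id:
                    self.channel2id[title] = id    # remember for later calls
                    break
        return id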

Before (lines 232-274):
    #-- Retrieve Youtube API query results
    #
    def api(self, method, ver=3, pages=5, **params):
        items = []

        # URL and default parameters
        (base_url, defaults) = self.service[ver]

        params = dict(defaults.items() + params.items())

        # Retrieve data set
        while pages > 0:
            j = ahttp.get(base_url + method, params=params)
            __print__(dbg.DATA, j)
            if j:
                # json decode
                data = json.loads(j)
                
                # extract items
                if "items" in data:
                    items += data["items"]
                elif "feed" in data:
                    items += data["feed"]["entry"]
                else:
                    pages = 0

            # Continue to load results?
            if items >= conf.max_streams:
                pages = 0
            elif "pageInfo" in data and data["pageInfo"]["totalResults"] < 50:
                pages = 0
            elif "nextPageToken" in data:
                params["pageToken"] = data["nextPageToken"]
                pages -= 1
            else:
                pages = 0
            self.parent.status(pages / 10.0)

        return items



    # Wrap API 3.0 result into streams row
    def wrap3(self, row, data):

After (lines 249-292):
    #-- Retrieve Youtube API query results
    #
    def api(self, method, ver=3, pages=5, **params):
        items = []

        # URL and default parameters
        (base_url, defaults) = self.service[ver]
        #params = dict(  defaults.items() | params.items()  )
        params = dict( defaults, **params )

        # Retrieve data set
        while pages > 0:
            j = ahttp.get(base_url + method, params=params)
            #__print__(dbg.DATA, j)
            if j:
                # json decode
                data = json.loads(j)
                
                # extract items
                if "items" in data:
                    items += data["items"]
                elif "feed" in data:
                    items += data["feed"]["entry"]
                else:
                    pages = 0

            # Continue to load results?
            if len(items) >= int(conf.max_streams):
                pages = 0
            elif "pageInfo" in data and data["pageInfo"]["totalResults"] < 50:
                pages = 0
            elif "nextPageToken" in data:
                params["pageToken"] = data["nextPageToken"]
                pages -= 1
            else:
                pages = 0
            self.parent.status( (11 - 1.852 * pages) / 10.0 )

        return items



    # Wrap API 3.0 result into streams row
    def wrap3(self, row, data):
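
The last hunk implements the multi-page part of the fix. The old loop compared the items list itself against conf.max_streams rather than the collected count; the new code compares len(items) >= int(conf.max_streams). The parameter merge also becomes Python 3 safe (dict(defaults, **params) instead of concatenating .items(), which only works on Python 2 lists), the debug dump of the raw JSON is commented out, and the per-page progress changes direction: instead of reporting pages / 10.0, a value that shrinks as work proceeds, the bar now climbs towards completion. With the default pages=5 the emitted values are easy to tabulate; what parent.status() does with the final value above 1.0 is outside this diff.

# Progress fractions reported by the new api() with the default pages=5:
# after each page, pages is decremented (or forced to 0 when enough items
# arrived) and self.parent.status((11 - 1.852 * pages) / 10.0) is called.
for pages in (4, 3, 2, 1, 0):
    print(round((11 - 1.852 * pages) / 10.0, 3))
# prints 0.359, 0.544, 0.73, 0.915, 1.1 -- one value per line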