Internet radio browser GUI for music/video streams from various directory services.

Branch: streamtuner2


Check-in [6dfe1fdeb5]

Overview
Comment: Add overly crude playlist_convert.title() extraction (for M3U/PLS/XSPF/ASX and the custom STRING/TEXT format used by the DND module). Insert imported rows at the right position. Scrolling won't work yet, because it is done in a separate thread.
SHA1: 6dfe1fdeb5c3aca6ceb0cfd5c4bc3550ce4256c5
User & Date: mario on 2015-04-20 23:18:26
Context
2015-04-20
23:18  Shorten "Ogg Vorbis 112kbit/s" option name (it was expanding the general settings dialog too much). check-in: 8759215e26 user: mario tags: trunk
23:18  Add overly crude playlist_convert.title() extraction (for M3U/PLS/XSPF/ASX and the custom STRING/TEXT format used by the DND module). Insert imported rows at the right position. Scrolling won't work yet, because it is done in a separate thread. check-in: 6dfe1fdeb5 user: mario tags: trunk
16:24  Move playlist extension and context probing into separate functions. Introduce some rather crude import functionality for a few playlist file formats. (Still requires a proper importer with title= reading, and entirely rows[]-based function signatures in the action module.) check-in: 8e3b1e4d5b user: mario tags: trunk
Changes

Modified action.py from [ee9e1bcb9d] to [d87b406b97].

Lines 324-338 → 324-353:

         urls = re.findall(rx, self.src, re.X)
         # decode urls
         if decode in ("xml", "*"):
             urls = [xmlunescape(url) for url in urls]
         if decode in ("json", "*"):
             urls = [url.replace("\\/", "/") for url in urls]
         # only uniques
-        return list(set(urls))
+        uniq = []
+        urls = [uniq.append(u) for u in urls if not u in uniq]
+        return uniq
+
+    # Try to capture common title schemes
+    def title(self):
+        t = re.search(r"""(?:
+              ^Title\d*=(.+)
+           |  ^\#EXTINF[-:\d,]*(.+)
+           |  <title>([^<>]+)
+           |  (?i)Title[\W]+(.+)
+        )""", self.src, re.X|re.M)
+        for i in range(1,10):
+            if t and t.group(i):
+                return t.group(i)
 
     # Only look out for URLs, not local file paths, nor titles
     extr_urls = (
        ("pls",  (r"(?im) ^ \s*File\d* \s*=\s* (\w+://[^\s]+) ", None)),
        ("m3u",  (r" (?m) ^( \w+:// [^#\n]+ )", None)),
        ("xspf", (r" (?x) <location> (\w+://[^<>\s]+) </location> ", "xml")),
        ("asx",  (r" (?x) <ref \b[^>]+\b href \s*=\s* [\'\"] (\w+://[^\s\"\']+) [\'\"] ", "xml")),

Modified channels/dnd.py from [9f80845729] to [ef4879a2ca].

Lines 1-25 → 1-26:

 # encoding: UTF-8
 # api: streamtuner2
-# title: Drag and Drop
-# description: Move streams/stations from and to other applications.
+# title: Drag and Drop (experimental)
+# description: Copy streams/stations from and to other applications.
 # depends: uikit
-# version: 0.1
+# version: 0.5
 # type: interface
 # config:
 #   { name: dnd_format, type: select, value: xspf, select: "pls|m3u|xspf|jspf|asx|smil", description: "Default temporary file format for copying a station entry." }
 # category: ui
 # priority: experimental
 #
 # Implements Gtk/X11 drag and drop support for station lists.
 # Should allow to export either just stream URLs, or complete
 # PLS, XSPF collections.
 #
 # Also used by the bookmarks channel to copy favourites around.
-# Which perhaps should even be constrained to just the bookmarks tab.
+# Which perhaps should even be constrained to just the bookmarks
+# tab.
 
 
 import copy
 from config import conf, json, log
 from uikit import *
 import action

Lines 80-93 → 81-96:

       ("text/uri-list", 0, 4),
       # url+comments
       ("TEXT", 0, 5),
       ("STRING", 0, 5),
       ("UTF8_STRING", 0, 5),
       ("text/plain", 0, 5),
     ]
 
+    # Map target/`info` integers to action. module identifiers
     cnv_types = {
        20: "m3u",
        21: "pls",
        22: "xspf",
        23: "smil",
        25: "jspf",
        15: "srv",
Lines 135-150 → 138-154:

     # Keep currently selected row when source dragging starts
     def treelist_row(self):
         cn = self.parent.channel()
         row = copy.copy(cn.row())
         row.setdefault("format", cn.audioformat)
         row.setdefault("listformat", cn.listformat)
         row.setdefault("url", row.get("homepage"))
+        row.update({"_origin": [cn.module, cn.current, cn.rowno()]}) # internal: origin channel+genre+rowid
         return row
 
     # Target window/app requests data for offered drop
     def data_get(self, widget, context, selection, info, time):
         log.DND("source→out: data-get, send and convert to requested target type:", info, selection.get_target())
         # Return prepared data
         func, data = self.export_row(info, self.row)
         if func.find("text") >= 0:
             # Yay for trial and error. Nay for docs. PyGtks selection.set_text() doesn't
Lines 208-257 → 212-287:

         # incoming data
         data = selection.get_text()
         urls = selection.get_uris()
         any = (data or urls) and True
 
         # Convert/Add
-        if any:
-            self.import_row(info, urls, data, y)
-        else:
-            log.DND("abort, no urls/text")
+        if any: self.import_row(info, urls, data, y)
+        else: log.DND("Abort, no urls/text.")
         
         # Respond
         context.drop_finish(any, time)
         context.finish(any, False, time)
         return True
 
     # Received files or payload has to be converted, copied into streams
     def import_row(self, info, urls, data, y=5000):
         # Internal target dicts
         cn = self.parent.channel()
         rows = []
+        print info
         
         # Direct/internal row import
         if data and info >= 51:
             log.DND("Received row, append, reload")
             rows += [ json.loads(data) ]
 
         # Convertible formats
         elif data and info >= 5:
             cnv = action.extract_playlist(data)
             urls = cnv.format(self.cnv_types[info] if info>=20 else "raw")
-            rows += [ self.imported_row(urls[0]) ]
+            rows += [ self.imported_row(urls[0], cnv.title()) ]
 
-        # Extract from playlist files (don't import mp3s into stream lists directly)
+        # Extract from playlist files, either passed as text/uri-list or FILE_NAME
         elif urls:
-            for fn in [re.sub("^\w+://[^/]*", "", fn) for fn in urls if re.match("^(scp|file)://(localhost)?/|/", fn)]:
+            for fn in [re.sub("^\w+://[^/]*", "", fn) for fn in urls or [data] if re.match("^(scp|file)://(localhost)?/|/", fn)]:
                 ext = action.probe_playlist_fn_ext(fn)
-                if ext:
+                if ext:  # don't import mp3s into stream lists directly
                     cnt = open(fn, "rt").read()
                     probe = action.probe_playlist_content(cnt)
                     if ext == probe:
                         cnv = action.extract_playlist(cnt)
                         urls = cnv.format(probe)
-                        rows += [ self.imported_row(urls[0], os.path.basename(fn)) ]
+                        rows += [ self.imported_row(urls[0], cnv.title() or os.path.basename(fn)) ]
         
         # Insert and update view
         if rows:
             # Inserting at correct row requires deducing index from dnd `y` position
-            cn.streams[cn.current] += rows
+            streams = cn.streams[cn.current]
+            i_pos = (cn.gtk_list.get_path_at_pos(10, y) or [[len(streams) + 1]])[0][0]
+            for row in rows:
+                streams.insert(i_pos - 1, row)
+                i_pos = i_pos + 1
             # Now appending to the liststore directly would be even nicer
-            uikit.do(cn.load, cn.current)
+            uikit.do(lambda *x: cn.load(cn.current))#, cn.gtk_list.scroll_to_point(0, y))
             if cn.module == "bookmarks":
                 cn.save()
             #self.parent.streamedit()
         else:
             self.parent.status("Unsupported station format. Not imported.")
 
 
+    # Stub row for dragged entries.
+    # Which is a workaround for the missing full playlist conversion and literal URL input
     def imported_row(self, url, title=None):
         return {
             "title": title or "",
             "url": url,
             "homepage": "",
             "playling": "",
             "listformat": action.probe_playlist_fn_ext(url) or "href",
             "format": ",".join(re.findall("ogg|mpeg|mp\d+", url)),
             "genre": "copy",
         }
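
To connect this with the "insert imported rows at the right position" part of the check-in comment, here is a condensed sketch of the index deduction, with `treeview` standing in for cn.gtk_list and `streams` for cn.streams[cn.current] (both stand-in names, not new API):

    # get_path_at_pos() returns (path, column, cell_x, cell_y), or None when the
    # drop landed below the last row; path[0] is the row index to insert at.
    hit = treeview.get_path_at_pos(10, y)
    i_pos = hit[0][0] if hit else len(streams)
    for row in rows:
        streams.insert(i_pos, row)
        i_pos += 1
    # The list reload is scheduled onto the Gtk main loop via uikit.do(); the
    # commented-out scroll_to_point(0, y) call is what the check-in comment
    # means by "Scrolling won't work, because it's done in a separate thread."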