Old version (lines 30-51):
# Still needs some rewrites to transition off the [url] lists,
# and work with full [rows] primarily. (And perhaps it should be
# renamed to "playlist" module now).

import re
import os
from ahttp import fix_url as http_fix_url, session
from config import *
import platform
import copy
import json
from datetime import datetime
from xml.sax.saxutils import escape as xmlentities, unescape as xmlunescape


# Coupling to main window
#
main = None


# Streamlink/listformat mapping
New version (lines 30-52):
# Still needs some rewrites to transition off the [url] lists,
# and work with full [rows] primarily. (And perhaps it should be
# renamed to "playlist" module now).

import re
import os
import platform
import copy
import json
from datetime import datetime
from xml.sax.saxutils import escape as xmlentities, unescape as xmlunescape
import ahttp
from config import *


# Coupling to main window
#
main = None


# Streamlink/listformat mapping
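The `main = None` hook above implies that the GUI assigns its window object
after importing this module. A comment-only sketch of that coupling (the module
name "action" is an assumption, not confirmed by this excerpt):

# import action
# action.main = main_window    # lets helpers here reach the GUI/window state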
Old version (lines 181-195):
    for match in [ fmt, major + "/*", "*/*", "video/*", "audio/*" ]:
        if cmd_list.get(match):
            return cmd_list[match]
    log.ERR("No audio player for stream type found")


# Replaces instances of %m3u, %pls, %srv in a command string.
#  · Also understands short aliases %l, %f, %d.
#  · And can embed %title or %genre placeholders.
#  · Replace .pls URL with local .m3u file depending on map.
#
def interpol(cmd, url, source="pls", row={}):

    # Inject other meta fields (%title, %genre, %playing, %format, etc.)
New version (lines 182-197):
    for match in [ fmt, major + "/*", "*/*", "video/*", "audio/*" ]:
        if cmd_list.get(match):
            return cmd_list[match]
    log.ERR("No audio player for stream type found")


# Replaces instances of %m3u, %pls, %srv in a command string
# ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
#  · Also understands short aliases %l, %f, %d.
#  · And can embed %title or %genre placeholders.
#  · Replace .pls URL with local .m3u file depending on map.
#
def interpol(cmd, url, source="pls", row={}):

    # Inject other meta fields (%title, %genre, %playing, %format, etc.)
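As a usage illustration of the placeholder scheme documented above, a hedged
sketch of calling interpol(); the player command, URL and row contents are
made-up examples:

# Expand a player command template for one station (hypothetical values)
cmd = interpol(
    "vlc %pls",                          # command template with a %pls placeholder
    "http://example.org/stream.pls",     # station URL from the source playlist
    source="pls",
    row={"title": "Example Radio", "genre": "jazz"},
)
# `cmd` now carries the quoted playlist URL/filename in place of %pls, ready to
# be launched as external player; a plain "/bin/false" signals that no
# placeholder could be substituted.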
Old version (lines 210-224):
            url = convert_playlist(url, listfmt(source), listfmt(dest), local_file=True, row=row)
            # insert quoted URL/filepath
            return re.sub(rx, quote(url), cmd, 2, re.X)
    return "/bin/false"


# Substitute .pls URL with local .m3u, or direct srv addresses, or leave the URL as-is.
#  · Takes a single input `url` (and original row{} as template).
#  · But returns a list of [urls] after playlist extraction.
#  · If repackaging as .m3u/.pls/.xspf, returns the local [fn].
#
def convert_playlist(url, source, dest, local_file=True, row={}):
    urls = []
    log.PROC("convert_playlist(", url, source, dest, ")")
New version (lines 212-231):
            url = convert_playlist(url, listfmt(source), listfmt(dest), local_file=True, row=row)
            # insert quoted URL/filepath
            return re.sub(rx, quote(url), cmd, 2, re.X)
    return "/bin/false"


# Substitute streaming address with desired playlist format
# ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
# Converts input rows/urls, probes for playlist format, fetches them
# and possibly converts remote .pls to a local .m3u/.xspf filename, or
# just returns direct "srv" urls.
#
#  · Takes a single input `url` (and original row{} as template).
#  · But returns a list of [urls] after playlist extraction.
#  · If repackaging as .m3u/.pls/.xspf, returns the local [fn].
#
def convert_playlist(url, source, dest, local_file=True, row={}):
    urls = []
    log.PROC("convert_playlist(", url, source, dest, ")")
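A hedged usage sketch for convert_playlist(), matching the docblock above; the
station URL is a placeholder:

# Resolve a remote .pls playlist into direct server addresses ("srv")
urls = convert_playlist(
    "http://example.org/stations/example.pls",
    "pls",             # source format
    "srv",             # desired output: raw streaming addresses
    local_file=False,
    row={"title": "Example Radio"},
)
# A [list] of urls comes back; with dest="m3u" and local_file=True the result
# would instead be the filename of a locally repackaged .m3u copy.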
Old version (lines 276-290):
# Tries to fetch a resource, aborts on ICY responses.
#
def http_probe_get(url):

    # HTTP request, abort if streaming server hit (no HTTP/ header, but ICY/ response)
    try:
        r = session.get(url, stream=True, timeout=5.0)
        if not len(r.headers):
            return ("srv", r)
    except:
        return ("srv", None)

    # Extract payload
    mime = r.headers.get("content-type", "href")
New version (lines 283-297):
# Tries to fetch a resource, aborts on ICY responses.
#
def http_probe_get(url):

    # HTTP request, abort if streaming server hit (no HTTP/ header, but ICY/ response)
    try:
        r = ahttp.session.get(url, stream=True, timeout=5.0)
        if not len(r.headers):
            return ("srv", r)
    except:
        return ("srv", None)

    # Extract payload
    mime = r.headers.get("content-type", "href")
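A hedged sketch of how the probe result might be consumed: a first element of
"srv" marks a direct streaming server (ICY response) or a failed request,
anything else is the content type of a fetched playlist document (the URL here
is a placeholder):

mime, content = http_probe_get("http://example.org/listen.pls")
if mime == "srv":
    pass    # direct stream (or failed request): keep the station URL as-is
else:
    log.DATA("playlist payload of type", mime, len(content))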
Old version (lines 301-316):
    # Rejoin into string
    content = "\n".join(str.decode(errors='replace') for str in r.iter_lines())
    return (mime, content)


# Extract URLs and meta infos (titles) from playlist formats.
#
# It's mostly regex-based at the moment, because that's more
# resilient against malformed XSPF or JSON. But specialized
# import helpers can be added as needed.
#
class extract_playlist(object):

    # Content of playlist file
New version (lines 308-323):
    # Rejoin into string
    content = "\n".join(str.decode(errors='replace') for str in r.iter_lines())
    return (mime, content)


# Extract URLs and meta infos (titles) from playlist formats
# ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
# It's mostly regex-based at the moment, because that's more
# resilient against malformed XSPF or JSON. But specialized
# import helpers can be added as needed.
#
class extract_playlist(object):

    # Content of playlist file
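The docblock above leaves room for "specialized import helpers". A hedged
sketch of what such a native parser method could look like, for a hypothetical
JSON station list; the method name, field names and input structure are
assumptions for illustration only:

    def jsonlist(self):
        # self.src holds the fetched playlist text
        data = json.loads(self.src)
        return [
            {"url": entry.get("url", ""), "title": entry.get("title", "")}
            for entry in data if entry.get("url")
        ]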
Old version (lines 353-369):
        log.DATA("input extractor/regex:", fmt, len(self.src))

        # specific extractor implementations
        if hasattr(self, fmt):
            try:
                return getattr(self, fmt)()
            except Exception as e:
                log.WARN("Native {} parser failed on input (improper encoding, etc)".format(fmt), e)

        # regex scheme
        rules = self.extr_urls[fmt]
        rows = []
        fields = [name for name in ("url", "title", "homepage", "genre", "playing") if rules.get(name)]

        # Block-wise processing
        if rules.get("split"):
            for part_src in re.split(rules["split"], self.src, 0, re.X):
New version (lines 360-379):
        log.DATA("input extractor/regex:", fmt, len(self.src))

        # specific extractor implementations
        if hasattr(self, fmt):
            try:
                return getattr(self, fmt)()
            except Exception as e:
                log.WARN("Native '{}' parser failed on input (improper encoding, etc)".format(fmt), e)

        # regex scheme
        if fmt not in self.extr_urls:
            log.ERR("Unknown playlist format type '{}' - falling back to 'raw' mode".format(fmt))
            fmt = "raw"
        rules = self.extr_urls[fmt]
        rows = []
        fields = [name for name in ("url", "title", "homepage", "genre", "playing") if rules.get(name)]

        # Block-wise processing
        if rules.get("split"):
            for part_src in re.split(rules["split"], self.src, 0, re.X):
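For orientation, a hedged sketch of the rule shape the regex scheme expects:
each extr_urls entry maps a format name to per-field regexes (applied with
re.X), plus an optional "split" pattern for block-wise processing. The patterns
below are illustrative only, not the module's actual rules:

example_extr_urls = {
    "pls": {
        "url":   r" File\d* \s*=\s* (\w+://\S+) ",     # File1=http://…
        "title": r" Title\d* \s*=\s* ([^\r\n]+) ",     # Title1=Station name
    },
    "raw": {
        "url":   r" (\w+://\S+) ",                     # permissive fallback
    },
}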
Old version (lines 539-554):
    video = re.findall("(mp4|flv|avi|mp2|theora|3gp|nsv|fli|ogv|webm|mng|mxu|wmv|mpv|mkv)", url)
    if video:
        return "video/{}".format(*video)
    return "x-audio-video/unknown"


# Save rows[] in one of the export formats.
#
#  → The export() version uses urls[] and a template row{} as input, and
#    converts it into a list of complete rows{} beforehand. It's mostly
#    utilized to expand a source playlist and merge in alternative streaming
#    server addresses.
#
#  → With store() a full set of rows[] is required to begin with, as
#    it performs a complete serialization. Can save directly to a file.
New version (lines 549-564):
    video = re.findall("(mp4|flv|avi|mp2|theora|3gp|nsv|fli|ogv|webm|mng|mxu|wmv|mpv|mkv)", url)
    if video:
        return "video/{}".format(*video)
    return "x-audio-video/unknown"


# Save rows[] in one of the export formats
# ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
#  → The export() version uses urls[] and a template row{} as input, and
#    converts it into a list of complete rows{} beforehand. It's mostly
#    utilized to expand a source playlist and merge in alternative streaming
#    server addresses.
#
#  → With store() a full set of rows[] is required to begin with, as
#    it performs a complete serialization. Can save directly to a file.
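To make the extension probing above concrete, a self-contained sketch; the
enclosing helper's real name is not part of this excerpt, so a stand-in is
used:

import re

def guess_video_mime(url):
    # Same probing idea as above: look for a known container/codec token in the URL
    video = re.findall("(mp4|flv|avi|mp2|theora|3gp|nsv|fli|ogv|webm|mng|mxu|wmv|mpv|mkv)", url)
    if video:
        return "video/{}".format(*video)
    return "x-audio-video/unknown"

guess_video_mime("http://example.org/clip.webm")   # → "video/webm"
guess_video_mime("http://example.org/stream")      # → "x-audio-video/unknown"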
Old version (lines 624-638):
    # M3U
    def m3u(self, rows):
        txt = "#EXTM3U\n"
        for r in rows:
            txt += "#EXTINF:-1,%s\n" % r["title"]
            txt += "%s\n" % http_fix_url(r["url"])
        return txt

    # PLS
    def pls(self, rows):
        txt = "[playlist]\n" + "NumberOfEntries=%s\n" % len(rows)
        for i,r in enumerate(rows):
            txt += "File%s=%s\nTitle%s=%s\nLength%s=%s\n" % (i+1, r["url"], i+1, r["title"], i+1, -1)
New version (lines 634-648):
    # M3U
    def m3u(self, rows):
        txt = "#EXTM3U\n"
        for r in rows:
            txt += "#EXTINF:-1,%s\n" % r["title"]
            txt += "%s\n" % ahttp.fix_url(r["url"])
        return txt

    # PLS
    def pls(self, rows):
        txt = "[playlist]\n" + "NumberOfEntries=%s\n" % len(rows)
        for i,r in enumerate(rows):
            txt += "File%s=%s\nTitle%s=%s\nLength%s=%s\n" % (i+1, r["url"], i+1, r["title"], i+1, -1)