Index: pluginconf/depends.py ================================================================== --- pluginconf/depends.py +++ pluginconf/depends.py @@ -7,45 +7,48 @@ # depends: pluginconf >= 0.7 # version: 0.5 # state: beta # license: PD # priority: optional +# permissive: 0.8 # # This is a rather basic depends: checker, mostly for local and # installable modules. It's largely built around streamtuner2 # requirements, and should be customized. # -# DependencyValidation().depends()/.valid() -# ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ +# Check().depends()/.valid() +# ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ # Probes a new plugins` depends: list against installed base modules. # Utilizes each version: fields and allows for virtual modules, or # alternatives and honors alias: names. # +""" Dependency validation and consistency checker for updates """ + -import pluginconf +import sys import re +#import zipfile +import logging +import pluginconf try: from distutils.spawn import find_executable except ImportError: try: from compat2and3 import find_executable except ImportError: - def find_executable(name): - pass -import zipfile -import logging + find_executable = lambda name: False # Minimal depends: probing # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ -class DependencyValidation(object): +class Check(): """ Now this definitely requires customization. Each plugin can carry a list of (soft-) dependency names. - # depends: config, appcore >= 2.0, bin:wkhtmltoimage, python < 3.5 + … # depends: config, appcore >= 2.0, bin:wkhtmltoimage, python < 3.5 Here only in-application modules are honored, system references ignored. Unknown plugin names are also skipped. A real install helper might want to auto-tick them on, etc. This example is just meant for probing downloadable plugins. @@ -54,97 +57,136 @@ modules, and if they're more recent. While .depends() compares minimum versions against existing modules. In practice there's little need for full-blown dependency resolving for application-level modules. 
+ + | Attributes | | | + |------------|---------|-----------------------------------------------------| + | api | list | allowed api: identifiers for .valid() stream checks | + | system_deps| bool | check `bin:app` or `python:package` dependencies | + | log | logging | warning handler | + | have | dict | accumulated list of existing/virtual plugins | """ - - """ supported APIs """ + + # supported APIs api = ["python", "streamtuner2"] - - """ debugging """ + + # debugging log = logging.getLogger("pluginconf.dependency") - # prepare list of known plugins and versions - def __init__(self, add={}, core=["st2", "uikit", "config", "action"]): + # ignore bin:… or python:… package in depends + system_deps = False + + def __init__(self, add=None, core=["st2", "uikit", "config", "action"]): + """ + Prepare list of known plugins and versions in self.have={} + + | Parameters | | | + |------------|---------|------------------------------------------------------| + | add | dict | name→pmd of existing/core plugins (incl ver or deps) | + | core | list | name list of virtual plugins | + """ self.have = { "python": {"version": sys.version} } # inject virtual modules - for name, meta in add.items(): + for name, meta in (add or {}).items(): if isinstance(meta, bool): meta = 1 if meta else -1 if isinstance(meta, tuple): meta = ".".join(str(n) for n in meta) if isinstance(meta, (int, float, str)): meta = {"version": str(meta)} self.have[name] = meta # read plugins/* - self.have.update(all_plugin_meta()) + self.have.update(pluginconf.all_plugin_meta()) # add core modules for name in core: - self.have[name] = plugin_meta(module=name, extra_base=["config"]) + self.have[name] = pluginconf.plugin_meta(module=name, extra_base=["config"]) # aliases for name, meta in self.have.copy().items(): if meta.get("alias"): for alias in re.split(r"\s*[,;]\s*", meta["alias"]): self.have[alias] = self.have[name] - # basic plugin pre-screening (skip __init__, filter by api:, - # exclude installed & same-version plugins) def valid(self, new_plugin): - id = new_plugin.get("$name", "__invalid") - have_ver = self.have.get(id, {}).get("version", "0") - if id.find("__") == 0: + """ + Plugin pre-screening from online repository stream. + Fields are $name, $file, $dist, api, id, depends, etc + Exclude installed or for newer-version presence. + + | Parameters | | | + |-------------|---------|------------------------------------------------------| + | new_plugin | dict | online properties of available plugin | + | **Returns** | bool | is updatatable | + """ + if not "$name" in new_plugin: + self.log.warning(".valid() checks online plugin lists, requires $name") + name = new_plugin.get("$name", "__invalid") + have_ver = self.have.get(name, {}).get("version", "0") + if name.find("__") == 0: self.log.debug("wrong/no id") elif new_plugin.get("api") not in self.api: self.log.debug("not in allowed APIs") elif {new_plugin.get("status"), new_plugin.get("priority")} & {"obsolete", "broken"}: self.log.debug("wrong status (obsolete/broken)") elif have_ver >= new_plugin.get("version", "0.0"): self.log.debug("newer version already installed") else: return True + return False - # Verify depends: and breaks: against existing plugins/modules def depends(self, plugin): + """ + Verify depends: and breaks: against existing plugins/modules + + | Parameters | | | + |-------------|---------|------------------------------------------------------| + | plugin | dict | plugin meta properties of (new?) 
plugin | + | **Returns** | bool | matches up with existing .have{} installation | + """ result = True if plugin.get("depends"): result &= self.and_or(self.split(plugin["depends"]), self.have) if plugin.get("breaks"): result &= self.neither(self.split(plugin["breaks"]), self.have) self.log.debug("plugin '%s' matching requirements: %i", plugin["id"], result) return result - # Split trivial "pkg | alt, mod>=1, uikit<4.0" string into nested list [[dep],[alt,alt],[dep]] def split(self, dep_str): + """ + Split trivial "pkg | alt, mod>=1, uikit<4.0" string + into nested list [ [alt, alt], [dep], [dep] ]; + with each entry comprised of (name, operator, version). + """ dep_cmp = [] for alt_str in re.split(r"\s*[,;]+\s*", dep_str): alt_cmp = [] # split alternatives | for part in re.split(r"\s*\|+\s*", alt_str): # skip deb:pkg-name, rpm:name, bin:name etc. - if not len(part): + if not part: continue if part.find(":") >= 0: self.have[part] = {"version": self.module_test(*part.split(":"))} # find comparison and version num part += " >= 0" - m = re.search(r"([\w.:-]+)\s*\(?\s*([>==": curr > ver, "<": curr < ver, "!=": curr != ver, } - r = tbl.get(op, True) - #print "log.VERSION_COMPARE: ", name, " → (", curr, op, ver, ") == ", r - return r + result = tbl.get(operator, True) + self.log.debug("VERSION_COMPARE: %s → (%s %s %s) == %s", name, curr, operator, ver, result) + return result - # Compare nested structure of [[dep],[alt,alt]] - def and_or(self, deps, have, r=True): + def and_or(self, deps, have, inner_true=True): + """ Compare nested structure of [[dep],[alt,alt]] """ #print deps return not False in [ - True in [self.cmp(d, have) for d in alternatives] for alternatives in deps + inner_true in [self.cmp(d, have) for d in alternatives] for alternatives in deps ] - # Breaks/Conflicts: check [[or],[or]] def neither(self, deps, have): + """ Breaks/Conflicts: check [[or],[or]] """ return not True in [ self.cmp(d, have, absent=None) for cnd in deps for d in cnd ] - # Resolves/injects complex "bin:name" or "python:name" dependency URNs - def module_test(self, type, name): - return "1" # disabled for now - if "_" + type in dir(self): - return "1" if bool(getattr(self, "_" + type)(name)) else "-1" - - # `bin:name` lookup - def _bin(self, name): + def module_test(self, urn, name): + """ Probes "bin:name" or "python:name" dependency URNs """ + if not self.system_deps: + return "1" + if "_" + urn in dir(self): + if bool(getattr(self, "_" + urn)(name)): + return "1" + return "-1" # basically a negative version -v1 + + @staticmethod + def _bin(name): + """ `bin:name` lookup """ return find_executable(name) - # `python:module` test - def _python(self, name): + @staticmethod + def _python(name): + """ `python:module` test """ return __import__("imp").find_module(name) is not None - Index: pluginconf/pluginconf.py ================================================================== --- pluginconf/pluginconf.py +++ pluginconf/pluginconf.py @@ -2,22 +2,31 @@ # api: python ##type: extract # category: config # title: Plugin configuration # description: Read meta data, pyz/package contents, module locating -# version: 0.7.7 +# version: 0.8.1 # state: stable # classifiers: documentation +# depends: python >= 2.7 +# suggests: python:flit, python:PySimpleGUI # license: PD # priority: core +# api-docs: https://fossil.include-once.org/pluginspec/doc/trunk/html/index.html # docs: https://fossil.include-once.org/pluginspec/ -# url: http://fossil.include-once.org/streamtuner2/wiki/plugin+meta+data +# url: 
https://fossil.include-once.org/pluginspec/wiki/pluginconf # config: - +# format: off +# permissive: 0.75 +# pylint: disable=invalid-name +# console-scripts: flit-pluginconf=pluginconf.flit:main # # Provides plugin lookup and meta data extraction utility functions. # It's used to abstract module+option management in applications. # For consolidating internal use and external/tool accessibility. +# Generally these functions are highly permissive / error tolerant, +# to preempt initialization failures for applications. # # The key:value format is language-agnostic. It's basically YAML in # a topmost script comment. For Python only # hash comments though. # Uses common field names, a documentation block, and an obvious # `config: { .. }` spec for options and defaults. @@ -32,10 +41,13 @@ # plugin_meta() # ‾‾‾‾‾‾‾‾‾‾‾‾‾ # Is the primary function to extract a meta dictionary from files. # It either reads from a given module= name, a literal fn=, or just # src= code, and as fallback inspects the last stack frame= else. +# +# The resulting dict allows [key] and .key access. The .config +# list further access by option .name. # # module_list() # ‾‾‾‾‾‾‾‾‾‾‾‾‾ # Returns basenames of available/installed plugins. It uses the # plugin_base=[] list for module relation. Which needs to be set up @@ -58,29 +70,50 @@ # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾ # Converts a list of config: options with arg: attribute for use as # argparser parameters. # # -# Generally this scheme concerns itself more with plugin basenames. -# That is: module scripts in a package like `ext.plg1` and `ext.plg2`. -# It can be initialized by injecting the plugin-package basename into -# plugin_base = []. The associated paths will be used for module -# lookup via pkgutil.iter_modules(). -# -# And a central module can be extended with new lookup locations best -# by attaching new locations itself via module.__path__ + ["./local"] -# for example. -# -# Plugin loading thus becomes as simple as __import__("ext.local"). -# The attached plugin_state config dictionary in most cases can just -# list module basenames, if there's only one set to manage. +# Simple __import__() scheme +# ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ +# Generally this scheme concerns itself more with plugin basenames. +# That is: module scripts in a package like `plugins.plg1`. To do so, +# have an `plugins/__init__.py` which sets its own `__path__`. +# Inject that package name into `plugin_base = ["plugins"]`. Thus +# any associated paths can be found per pkgutil.iter_modules(). +# +# Importing modules then also becomes as simple as invoking +# `module = __import__(f"plugins.{basename}"]` given a plugin name. +# The "plugins" namespace can subsequently be expanded by attaching +# more paths, such as `+= ["./config/usermodules"]` or similiar. +# +# Thus a plugin_state config dictionary in most cases can just list +# module basenames, if there's only one namespace to manage. (Plugin +# names unique across application.) + +""" +Plugin meta extraction and module lookup. + + +
+ • Main function plugin_meta() unpacks meta fields into dictionaries.
+ • Other utility code is about module listing, relative to plugin_base anchors.
+ • //pypi.org/project/pluginconf/
+ • //fossil.include-once.org/pluginspec/
  • +""" import sys import os +import os.path import re import functools +import itertools import pkgutil import inspect try: from gzip import decompress as gzip_decode # Py3 only except ImportError: @@ -91,33 +124,51 @@ def gzip_decode(bytestr): """ haphazard workaround """ return zlib.decompress(bytestr, 16 + zlib.MAX_WBITS) import zipfile import argparse +import logging +#logging.basicConfig(level=logging.DEBUG) __all__ = [ - "get_data", "module_list", "plugin_meta", "add_plugin_defaults" + "plugin_meta", "get_data", "module_list", "add_plugin_defaults", + "PluginMeta", "OptionList", "all_plugin_meta", + "data_root", "plugin_base", "config_opt_type_map", ] # Injectables # ‾‾‾‾‾‾‾‾‾‾‾ +log = logging.getLogger("pluginconf") """ injectable callback function for logging """ -log_ERR = lambda *x: None +data_root = "config" # inspect.getmodule(sys._getframe(1)).__name__ """ File lookup relation for get_data(), should name a top-level package. -(Equivalent PluginBase(package=…)) -""" -module_base = "config" - -""" -Package/module names for module_list() and plugin_meta() lookups. -All associated paths will be scanned for module/plugin basenames. -(Equivalent to `searchpath` in PluginBase) -""" -plugin_base = ["channels"] +(Equivalent to `PluginBase(package=…)`) +""" + +plugin_base = ["plugins"] +""" +Package/module names (or directories) for module_list() and plugin_meta() +lookups. Associated paths (`__path__`) will be scanned for module/plugin +basenames. (Similar to `PluginBase(searchpath=…)`) +""" + +config_opt_type_map = { + "longstr": "text", + "string": "str", + "boolean": "bool", + "checkbox": "bool", + "integer": "int", + "number": "int", + "choice": "select", + "options": "select", + "table": "dict", + "array": "dict" +} +""" normalize config type: names to `str`, `text`, `bool`, `int`, `select`, `dict` """ # Compatiblity # ‾‾‾‾‾‾‾‾‾‾‾‾ def renamed_arguments(renamed): @@ -134,191 +185,261 @@ # Resource retrieval # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ @renamed_arguments({"fn": "filename", "gz": "gzip"}) -def get_data(filename, decode=False, gzip=False, file_base=None): +def get_data(filename, decode=False, gzip=False, file_root=None, warn=True): """ Fetches file content from install path or from within PYZ archive. This is just an alias and convenience wrapper for pkgutil.get_data(). - Utilizes the module_base / plugin_base as top-level reference. + Utilizes the data_root as top-level reference. 
- :arg str fn: filename in pyz or bundle - :arg bool decode: text file decoding utf-8 - :arg bool gz: automatic gzdecode - :arg str file_base: alternative base module reference + | Parameters | | | + |-------------|---------|----------------------------| + | filename | str | filename in pyz or bundle | + | decode | bool | text file decoding utf-8 | + | gzip | bool | automatic gzdecode | + | file_root | list | alternative base module (application or pyz root) | + | **Returns** | str | file contents | """ try: - data = pkgutil.get_data(file_base or module_base, filename) + data = pkgutil.get_data(file_root or data_root, filename) if gzip: data = gzip_decode(data) if decode: return data.decode("utf-8", errors='ignore') return str(data) - except: - # log_ERR("get_data() didn't find:", fn, "in", file_base) - pass + except: #(FileNotFoundError, IOError, OSError, ImportError, gzip.BadGzipFile): + if warn: + log.warning("get_data() didn't find '%s' in '%s'", filename, file_root) # Plugin name lookup # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ def module_list(extra_paths=None): """ Search through ./plugins/ (and other configured plugin_base names → paths) and get module basenames. - :arg list extra_paths: in addition to plugin_base list + | Parameter | | | + |-------------|---------|---------------------------------| + | extra_paths | list | in addition to plugin_base list | + | **Returns** | list | names of found plugins | """ # Convert plugin_base package names into paths for iter_modules paths = [] - for mp in plugin_base: - if sys.modules.get(mp): - paths += sys.modules[mp].__path__ - elif os.path.exists(mp): - paths.append(mp) + for module_or_path in plugin_base: + if sys.modules.get(module_or_path): + try: + paths += sys.modules[module_or_path].__path__ + except AttributeError: + paths += os.path.dirname(os.path.realpath( + sys.modules[module_or_path] + )) + elif os.path.exists(module_or_path): + paths.append(module_or_path) # Should list plugins within zips as well as local paths - ls = pkgutil.iter_modules(paths + (extra_paths or [])) - return [name for loader, name, ispkg in ls] + dirs = pkgutil.iter_modules(paths + (extra_paths or [])) + return [name for loader, name, ispkg in dirs] # Plugin => meta dict # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ def all_plugin_meta(): """ This is a trivial wrapper to assemble a complete dictionary of available/installed plugins. It associates each plugin name with a its meta{} fields. + + | Parameters | | | + |-------------|---------|---------------------------------| + | **Returns** | dict | names to `PluginMeta` dict | """ return { name: plugin_meta(module=name) for name in module_list() } # Plugin meta data extraction # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ -@renamed_arguments({"filename": "fn"}) -def plugin_meta(fn=None, src=None, module=None, frame=1, **kwargs): +@renamed_arguments({"fn": "filename"}) +def plugin_meta(filename=None, src=None, module=None, frame=1, **kwargs): """ - Extract plugin meta data block from different sources: - - :arg str fn: read literal files, or .pyz contents - :arg str src: from already uncovered script code - :arg str module: lookup per pkgutil, from plugin_base or top-level modules - :arg int frame: extract comment header of caller (default) - :arg list extra_base: additional search directories - :arg ist max_length: maximum size to read from files + Extract plugin meta data block from specified source. + + | Parameters | | | + |-------------|---------|-------------------------------------------------| + | filename | str | Read literal files, or .pyz contents. 
| + | src | str | From already uncovered script code. | + | module | str | Lookup per pkgutil, relative to plugin_base | + | frame | int | Extract comment header of caller (default). | + | extra_base | list | Additional search directories. | + | max_length | list | Maximum size to read from files (6K default). | + | **Returns** | dict | Extracted comment fields, with config: preparsed| + + The result dictionary (`PluginMeta`) has fields accessible as e.g. `meta["title"]` + or `meta.version`. The documentation block after all fields: is called + `meta["doc"]`. + And `meta.config` already parsed as a list (`OptionList`) of dictionaries. """ # Try via pkgutil first, # find any plugins.* modules, or main packages if module: - fn = module - for base in plugin_base + kwargs.get("extra_base", []): + search = plugin_base + kwargs.get("extra_base", []) + for base, sfx in itertools.product(search, [".py", "/__init__.py"]): try: - src = get_data(fn=fn+".py", decode=True, file_base=base) + #log.debug(f"mod={base} fn={filename}.py") + src = get_data(filename=module+sfx, decode=True, file_root=base, warn=False) if src: break - except: + except (IOError, OSError, FileNotFoundError): continue # plugin_meta_extract() will print a notice later + else: + log.warning("Found no source candidate for '%s'", module) + filename = module # Real filename/path - elif fn and os.path.exists(fn): - src = open(fn).read(kwargs.get("max_length", 6144)) + elif filename and os.path.exists(filename): + src = open(filename).read(kwargs.get("max_length", 6144)) # Else get source directly from caller - elif not src and not fn: + elif not src and not filename: module = inspect.getmodule(sys._getframe(frame+1)) # decorator+1 - fn = inspect.getsourcefile(module) + filename = inspect.getsourcefile(module) src = inspect.getcomments(module) - # Assume it's a filename within a zip - elif fn: - intfn = "" - while fn and len(fn) and not os.path.exists(fn): - fn, add = os.path.split(fn) - intfn = add + "/" + intfn - if len(fn) >= 3 and intfn and zipfile.is_zipfile(fn): - src = zipfile.ZipFile(fn, "r").read(intfn.strip("/")) + # Assume it's a filename matching …/base.zip/…/int.py + elif filename: + int_fn = "" + while len(filename) and not os.path.exists(filename): # pylint: disable=len-as-condition + filename, add = os.path.split(filename) + int_fn = add + "/" + int_fn + if len(filename) >= 3 and int_fn and zipfile.is_zipfile(filename): + src = zipfile.ZipFile(filename, "r").read(int_fn.strip("/")) # Extract source comment into meta dict if not src: src = "" - if not isinstance(src, str): - src = src.decode("utf-8", errors='replace') - return plugin_meta_extract(src, fn) + if hasattr(src, "decode"): + try: + src = src.decode("utf-8", errors='replace') + except UnicodeDecodeError: + pass + return plugin_meta_extract(src, filename) # Comment and field extraction logic # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ -def plugin_meta_extract(src="", fn=None, literal=False): +@renamed_arguments({"fn": "filename"}) +def plugin_meta_extract(src="", filename=None, literal=False): """ Finds the first comment block. Splits key:value header fields from comment. Turns everything into an dict, with - some stub fields if absent. + some stub fields if absent. Dashes substituted for underscores. 
- :arg str src: from existing source code - :arg int fn: set filename attribute - :arg bool literla: just split comment from doc + | Parameters | | | + |-------------|---------|---------------------------------| + | src | str | from existing source code | + | filename | str | set filename attribute | + | literal | bool | just split comment from doc | + | **Returns** | dict | fields | """ # Defaults meta = { - "id": os.path.splitext(os.path.basename(fn or ""))[0], - "fn": fn, + "id": os.path.splitext(os.path.basename(filename or ""))[0], + "fn": filename, "api": "python", "type": "module", "category": None, "priority": None, "version": "0", - "title": fn, + "title": filename, "description": "no description", "config": [], "doc": "" } # Extract coherent comment block src = src.replace("\r", "") if not literal: + src = rx.header.sub("", src) src = rx.comment.search(src) if not src: - log_ERR("Couldn't read source meta information:", fn) + log.warning("Couldn't read source meta information: %s", filename) return meta - src = src.group(0) - src = rx.hash.sub("", src).strip() + src = src[1] or src[2] or src[3] or src[4] + src = rx.hash(src).sub("", src).strip() # Split comment block if src.find("\n\n") > 0: src, meta["doc"] = src.split("\n\n", 1) # Turn key:value lines into dictionary for field in rx.keyval.findall(src): - meta[field[0].replace("-", "_")] = field[1].strip() + meta[field[0].replace("-", "_").lower()] = field[1].strip() meta["config"] = plugin_meta_config(meta.get("config") or "") - return meta + return PluginMeta(meta) + + +# Dict/list wrappers +# ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ +class PluginMeta(dict): + """ + Plugin meta data as dictionary`{}`, or alternatively `.property` access. + Returned for each `plugin_meta()` result, and individual `config:` options. + Absent `.field` access resolves to `""`. + """ + + def __getattr__(self, key, default=""): + """ Return [key] for .property access, else `""`. """ + if key == "config": + default = OptionList() + return self.get(key, default) + + def __setattr__(self, key, val): + """ Shouldn't really have this, but for parity. """ + self[key] = val + +class OptionList(list): + """ + List of `config:` options, with alernative `.name` access (lookup by name= from option entry). + """ + + def __getattr__(self, key): + """ Returns list entry with name= equaling .name access """ + for opt in self: + if opt.name == key: + return opt + raise KeyError("No option name '%s' in config list" % key) # Unpack config: structures # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ -def plugin_meta_config(str): +def plugin_meta_config(src): """ Further breaks up the meta['config'] descriptor. Creates an array from JSON/YAML option lists. - :arg str str: unprocessed config: field - Stubs out name, value, type, description if absent. # config: { name: 'var1', type: text, value: "default, ..." } { name=option2, type=boolean, $value=1, etc. 
} + + | Parameters | | | + |-------------|---------|--------------------------------------| + | src | str | unprocessed config: field | + | **Returns** | list | of option dictionaries | """ config = [] - for entry in rx.config.findall(str): + for entry in rx.config.findall(src): entry = entry[0] or entry[1] opt = { "type": None, "name": None, "description": "", @@ -330,24 +451,20 @@ opt["type"] = config_opt_type_map.get(opt["type"], opt["type"] or "str") # preparse select: if opt.get("select"): opt["select"] = config_opt_parse_select(opt.get("select", "")) config.append(opt) - return config + + return OptionList(PluginMeta(opt) for opt in config) # split up `select: 1=on|2=more|3=title` or `select: foo|bar|lists` -def config_opt_parse_select(s): - if re.search("([=:])", s): - return dict(rx.select_dict.findall(s)) - else: - return dict([(v, v) for v in rx.select_list.findall(s)]) - -# normalize type:names to `str`, `text`, `bool`, `int`, `select`, `dict` -config_opt_type_map = dict( - longstr="text", string="str", boolean="bool", checkbox="bool", integer="int", number="int", - choice="select", options="select", table="dict", array="dict" -) +def config_opt_parse_select(select): + """ unpack 1|2|3 or title=lists """ + if re.search("([=:])", select): + return dict(rx.select_dict.findall(select)) + #else: + return {val: val for val in rx.select_list.findall(select)} # Comment extraction regexps # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ class rx: @@ -355,30 +472,63 @@ Pretty crude comment splitting approach. But works well enough already. Technically a YAML parser would do better; but is likely overkill. """ - comment = re.compile(r"""(^ {0,4}#.*\n)+""", re.M) - hash = re.compile(r"""(^ {0,4}#{1,2} {0,3}\r*)""", re.M) + header = re.compile(r""" + (\A ( + \#! \s+ /.+ | # shebang + <\?php .* + ) $)+ + """, re.M | re.X) + comment = re.compile(r""" + ((?:^ [ ]{0,4} (\#|//) .*\n)+) | # general + /\*+ ([\s\S]+?) \*/ | # C-multiline + <\# ([\s\S]+?) \#> | # PS + \{- ([\s\S]+?) -\} # Haskell + """, re.M | re.X) + hash_det = re.compile(r""" + ^ ([ \t]*) ([#*/]*) ([ ]*) [\w-]*: # determine indent, craft strip regex + """, re.M | re.X) keyval = re.compile(r""" - ^([\w-]+):(.*$(?:\n(?![\w-]+:).+$)*) # plain key:value lines + ^ ([\w-]+) : ( .*$ # plain key:value lines + (?: \n(?![\w-]+:) .+$ )* # continuation lines sans ^xyz: + ) """, re.M | re.X) config = re.compile(r""" - \{ ((?: [^\{\}]+ | \{[^\}]*\} )+) \} # JSOL/YAML scheme {...} dicts - | \< (.+?) \> # old HTML style + \{ ((?: [^\{\}]+ | \{[^\}]*\} )+) \} # JSOL/YAML scheme {...} dicts + | \< (.+?) \> # old HTML style """, re.X) options = re.compile(r""" - ["':$]? (\w*) ["']? # key or ":key" or '$key' - \s* [:=] \s* # "=" or ":" + ["':$]? (\w*) ["']? # key or ":key" or '$key' + \s* [:=] \s* # "=" or ":" (?: " ([^"]*) " - | ' ([^']*) ' # "quoted" or 'singl' values - | ([^,]*) # or unquoted literals + | ' ([^']*) ' # "quoted" or 'singl' values + | ([^,]*) # or unquoted literals ) """, re.X) - select_dict = re.compile(r"(\w+)\s*[=:>]+\s*([^=,|:]+)") - select_list = re.compile(r"\s*([^,|;]+)\s*") + select_dict = re.compile(r""" + (\w+) \s* [=:>]+ \s* ([^=,|:]+) # key=title | k2=t2 + """, re.X) + select_list = re.compile(r""" + \s*([^,|;]+)\s* # alt | lists + """, re.X) + @staticmethod + def hash(src): + """ find first comment to generate consistent strip regex for following lines """ + m = rx.hash_det.search(src) + if not m:# or not m[2]: + return re.compile("^ ? 
?[#*/]{0,2} ?}", re.M) # fallback + hash_rx = "^" + if m[1]: # indent + hash_rx += m[1] + "{0,2}" # +- 1 in length? + if m[2]: # hash + hash_rx += "[" + m[2] + "]{1,%s}" % (len(m[2]) + 1) + if m[3]: # space + hash_rx += m[3] + "{0,2}" + return re.compile(hash_rx, re.M) # ArgumentParser options conversion # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ def argparse_map(opt): @@ -444,25 +594,27 @@ # Add plugin defaults to conf.* store # ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ def add_plugin_defaults(conf_options, conf_plugins, meta, module=""): """ - Utility function which collect defaults from plugin meta data to - a config store. Which in the case of streamtuner2 is really just a - dictionary `conf{}` and a plugin list in `conf.plugins{}`. - - :arg dict conf_options: storage for amassed options - :arg dict conf_plugins: enable status based on plugin state/priority: - :arg dict meta: input plugin meta data (invoke once per plugin) - :arg str module: module name of meta: block + Utility function to collect defaults from plugin meta data to + a config dict/store. + + | Parameters | | | + |-------------|---------|--------------------------------------| + | conf_options| dict 🔁 | storage for amassed #config: options | + | conf_plugins| dict 🔁 | activation status derived from state/priority: | + | meta | dict | input plugin meta data (invoke once per plugin)| + | module | str | basename of meta: blocks plugin file | + | **Returns** | None | - | """ # Option defaults, if not yet defined for opt in meta.get("config", []): if "name" not in opt or "value" not in opt: continue - _value = opt.get("value", "") + _value = opt.get("value") or "" _name = opt.get("name") _type = opt.get("type") if _name in conf_options: continue # typemap
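
For reference, a minimal usage sketch of how the pieces added above fit together.
The "plugins" package name, the "example" module and the repository entry below
are placeholder assumptions for illustration, not part of the patch:

    import pluginconf
    import pluginconf.depends

    # anchor module lookups at an application-specific package (placeholder name)
    pluginconf.plugin_base = ["plugins"]

    # plugin_meta() returns a PluginMeta dict: fields readable as ["key"] or .key,
    # absent fields resolve to "", and .config is an OptionList of option dicts
    meta = pluginconf.plugin_meta(module="example")
    print(meta.title, meta.version)
    for opt in meta.config:
        print(opt.name, opt.type, opt.value)

    # Check() collects installed plugins/versions into .have{};
    # .depends() verifies a depends:/breaks: spec against that inventory,
    # while .valid() pre-screens repository entries (which carry $name, id, api)
    chk = pluginconf.depends.Check()
    if chk.depends(meta):
        print("requirements satisfied")

    candidate = {"$name": "example", "id": "example", "api": "python", "version": "0.2"}
    if chk.valid(candidate) and chk.depends(candidate):
        print("update available")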