diff --git a/README.md b/README.md index 2a4a4f8..d827745 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,128 @@ -# philosophy +# Important Files -## classes -- compositional -- only init manipulates the object -- no inheretance -- functions return a clone of the object with primary data object cloned and manipulated - -## other -- heavy inference from type hints -- library-ize everything -- thin CLI wrapper as stand alone file -- pyproject setup only -- deveopment branch with pyenv to source into shell for development -- data oriented with classes being simplest data conainers with helper functions - -## imports -Files with an _prefix_ underscore are, by convention, imported in `__init__.py`. For example, - -```python -from ._rgb_ import * +``` +├── bashrc_defl.sh | +├── bin | +│   ├── ddwatch.sh | +│   └── deflMergeDevToMain.sh | +├── defl | +│   ├── *_.py | +│   ├── _*_.py | +│   ├── _path/ | +│   └── thirdParty/ | +│   └── * | +├── LICENSE.GPL | +├── pyproject.toml | +├── README.md | +├── scripts/ | +│   └── setup.sh | +└── tests/ | + └── test_*.py | ``` -This will import all of the functions from `_rgb_` into the module’s name space. So If `_rgb_` contains a function `interpolatePoint` you can reference it by `PythonModuleDemo._rgb_.interpolatePoint` or more simply `PythonModuleDemo.interpolatePoint`. Since this module is intended to provide a swath of functionality I wanted to provide the user with the simplest method of access. + -The reason all files end with a _suffix_ underscore is to avoid any name collisions with python standard Library. For instance I have a file called `_math_.py`. If it were only called `math.py` it would collide with python standard Library when doing `import math`. 
+# Pragdim
+
+- Data oriented with classes being simplest data containers with helper functions
+
+## security
+
+- Minimal use of PyPi dependencies to reduce security overhead
+  - [Ultralytics AI Library Hacked via GitHub for Cryptomining](https://www.wiz.io/blog/ultralytics-ai-library-hacked-via-github-for-cryptomining)
+- Use system tools installed via system package manager
+  - Safer but nothing is perfect
+  - [Backdoor in XZ Utils allows RCE: everything you need to know](https://www.wiz.io/blog/cve-2024-3094-critical-rce-vulnerability-found-in-xz-utils)
+- `Docker`
+- `netns` network namespace isolation
+- When installing packages go to the GitHub page and look for an explicit `pip install ...` to avoid typosquatting due to typos.
+- Don't trust PyPi
+  - "`Starjacking` is linking a PyPi package to an unrelated repository on GitHub that has plenty of stars, suggesting popularity. [...] there is no validation of these links on PyPi [...]." ([source](https://devclass.com/2023/10/13/pypi-repo-attack-typesquatting-starjacking-and-hidden-code-aims-to-pinch-credentials-and-secrets/))
+
+## Classes
+
+- Dataclass with functional programming
+
+### Compositional
+
+### No Inheritance
+
+- Limited use of mixins.
+- No super classing.
+- The inheritance hierarchy is a single level.
+- No object has a parent.
+- https://doc.rust-lang.org/book/ch18-01-what-is-oo.html
+  - ```
+    Inheritance has recently fallen out of favor as a programming design solution in many programming languages
+    because it’s often at risk of sharing more code than necessary. Subclasses shouldn’t always share all
+    characteristics of their parent class but will do so with inheritance. This can make a program’s design less
+    flexible. It also introduces the possibility of calling methods on subclasses that don’t make sense or that
+    cause errors because the methods don’t apply to the subclass.
    In addition, some languages will only allow
+    single inheritance (meaning a subclass can only inherit from one class), further restricting the flexibility
+    of a program’s design.
+    ```
+
+### Manipulation
+
+- Dot Chaining
+  - Most functions return `self` so many functions can run on one line.
+  - See `Run()` class.
+  - Example: `Run(['ls']).run(out=F, err=T).assSuc().json()`
+    - `Run(['ls'])` creates the object
+    - `.run(out=F, err=T).assSuc()` both return `self`
+    - `.json()` returns a dict
+- `clone()` function returns a copy of the object with primary data object duplicated and altered
+  - see `Dath()` class.
+- `set`/`get`
+  - See `Find()` class.
+
+### Type Hints
+
+- inference
+
+## Module
+
+### library-ize everything
+
+- Thin CLI wrapper as stand alone file
+
+### Setup
+
+- `scripts/setup.sh`
+
+### pyproject.toml
+
+### Files
+
+- Always use underscore to avoid namespace collision
+  - Example:
+    - `import math` is a python stdlib module.
+    - If a file, `math.py`, exists in the source directory it will cause collision.
+    - Instead create `math_.py` to subvert the issue.
+- `_*_.py` (i.e. `_path_.py`)
+  - These are always imported and cast into the module's namespace.
+  - Imported in `__init__.py`
+  - Example:
+    - there is a Class called `Dath` in `src/defl/_path_.py`
+    - You can refer to it using `from defl._path_ import Dath` or simply `from defl import Dath`
+- `*_.py` (i.e. `mpv2_.py`)
+  - These are not automatically imported.
+  - One must import them explicitly.
+ - Example: + - `from dath.mpv2_ import Mpv` + +## Branching + +- Deveopment branch with env to source into shell for development +- `defl`/`deflDev` +- `bin/deflMergeDevToMain.sh` + +## Unit Tests + + + +# Import Time -# import time ```bash λ timeRepeat.py - -c 500 -- python -c '' 0.021840 diff --git a/alias.toml b/alias.toml new file mode 100644 index 0000000..f0e23aa --- /dev/null +++ b/alias.toml @@ -0,0 +1,4 @@ + +[deflWhich] +allowDuplicate = true +cmd = "python -c 'import defl;print(defl.Dath(defl.__file__).gitRepoRoot())'" diff --git a/defl/_argsFromObject2_.py b/defl/_argsFromObject2_.py index ec2236e..3f99735 100644 --- a/defl/_argsFromObject2_.py +++ b/defl/_argsFromObject2_.py @@ -19,6 +19,8 @@ from ._typeCheck_ import * from rich import panel from rich.table import Table +from enum import Enum + from pydantic import create_model, ConfigDict @@ -42,6 +44,10 @@ class SublimationError(ArgFromObjError): __slot__ = () +class ReifyError(ArgFromObjError): + __slot__ = () + + class CliError(ArgFromObjError): __slot__ = () @@ -92,7 +98,19 @@ class _Flag: _._corporealization = _.default # print('bool', _) else: - _._corporealization = tc.dominent()(val) + dom = tc.dominent() + if hasattr(dom, '_value2member_map_'): + # print(type(dom), dom, val) + val = val.lower() + match = [v for k, v in dom._value2member_map_.items() if k.lower() == val] + if len(match) == 0: + raise ReifyError(f'{dom} has no value {val}') + elif len(match) > 1: + raise ReifyError(f'{dom} has multiple values for {val} {match}') + match = match[0] + _._corporealization = dom(match) + else: + _._corporealization = dom(val) return _ def sublimate(_) -> N: # | finalize the type diff --git a/defl/_basic_.py b/defl/_basic_.py index 2add766..bd1adf5 100644 --- a/defl/_basic_.py +++ b/defl/_basic_.py @@ -441,7 +441,9 @@ def dictFromListOfList(theList): class Enumer(enum.StrEnum): # def __init__(_, *args, **kargs) -> N: - # raise ValueError('Dont initialize') + # print('__init__', args, kargs) + # 
raise ValueError('Dont initialize') + # print(args) # super().__init__(*args, **kargs) @@ -453,6 +455,20 @@ class Enumer(enum.StrEnum): if keyStr.lower() == x.lower() or valObj is x: return valObj + # def __call__(_, *args, **kargs): + # print('__call__', args, kargs) + # return super().__call__(_, *args, **kargs) + + # def __new__(cls, value): + # # https://docs.python.org/3/library/enum.html#enum.Enum.__new__ + # # | When writing a custom __new__, do not use super().__new__ – call the appropriate __new__ instead. + # # | ...This is why inheritance sucks... + # print('__new__', cls, value) + # obj = str.__new__(cls) + # obj._value_ = value + # # obj = cls.__new__(cls, value) + # # print(obj) + # def __getattr__(*args, **kargs): # print(args, kargs) @@ -489,3 +505,21 @@ def roundRobin(*iterables): def takeCount(count, iterab): for i, x in itertools.takewhile(lambda x: x[0] < count, zip(itertools.count(), iterab)): yield x + + +K = TypeVar('K') +V = TypeVar('V') + + +def resolveKeyValToVal( + includeMap: Mapping[K, V], + include: Iterable[K | V], + defaultIncludeAll: bool = F, +) -> list[V]: + if defaultIncludeAll and not include: + return list(includeMap.values()) + includeList = [] + for k, v in includeMap.items(): + if k in include or v in include: + includeList.append(v) + return includeList diff --git a/defl/_jq_.py b/defl/_jq_.py index 7ede4a5..dcf9b5f 100644 --- a/defl/_jq_.py +++ b/defl/_jq_.py @@ -7,6 +7,7 @@ from subprocess import Popen, PIPE from dataclasses import dataclass from typing import Any, Callable, Mapping, NewType +from typing import BinaryIO from json.decoder import JSONDecodeError from .thirdParty import jstyleson from ._typing_ import * @@ -138,7 +139,7 @@ def jdumps( res, __ = sp.communicate(res.encode('utf8')) res = bytearray(res) for frm, to in jqColPost.items(): - res = res.replace(frm, to) # for key name + res = res.replace(frm, to) # | for key name if asBytes: res = res.strip(b'\n') else: @@ -199,10 +200,10 @@ def jloads(text, *args, 
**kargs): return jstyleson.loads(text, *args, **kargs) -def jdump(data, output: io.BytesIO = N) -> io.BytesIO: +def jdump(data, output: io.BytesIO = N, **kargs) -> io.BytesIO: if output is N: output = io.BytesIO() - json.dump(obj=jsonReprDump(data), fp=output) + json.dump(obj=jsonReprDump(data), fp=output, **kargs) return output @@ -269,6 +270,10 @@ class JqLoad: yield from _.fromFile(src=fp) +class JqDumpInputDiedError(Exception): + __slot__ = () + + @dataclass(slots=T, kw_only=T, frozen=F) class JqDump: "line oriented json save" @@ -276,34 +281,40 @@ class JqDump: # TODO JqOptions # | defl.JqDump(output=Path('file').open('ab', buffering=0)) - output: io.BytesIO = field(kw_only=F, repr=T) + output: TextIO = field(kw_only=F, repr=T) trans: str = '.' + append: bool = F _run: Popen = N def __post_init__(_): - pass - + # TODO should use https://docs.python.org/3/library/io.html#io.TextIOWrapper ? write_through=True? if hasattr(_.output, 'open'): - _.output = _.output.open('wb') + _.output = _.output.open('a' if _.append else 'w', buffering=1) + + assert 'b' not in _.output.mode # | can not use byte mode due to line_buffering + assert hasattr(_.output, 'fileno') com = ['jq', '--compact-output', '--monochrome-output', '--unbuffered', _.trans] - _._run = Popen(com, stdout=_.output, stdin=PIPE, stderr=PIPE, bufsize=0) + _._run = Popen(com, stdout=_.output, stdin=PIPE, stderr=PIPE, bufsize=1, encoding='utf8') def dump(_, item: Any) -> N: - # TODO avoice json.dumps leading a string and encode. direct to bytes - a = json.dumps(jsonReprDump(item)).encode() + a = json.dumps(jsonReprDump(item)) _._run.stdin.write(a) + _._run.stdin.write('\n') # | required for line_buffering to instantly write to file + # | If line_buffering is True, flush() is implied when a call to write contains a newline character or a carriage return. _._run.stdin.flush() - # TODO not flushing to file immediatly? 
+ if _._run.poll() is not N: + raise JqDumpInputDiedError() _.output.flush() return _ def done(_) -> Generator[dict, N, N]: _._run.stdin.close() _._run.wait() + _.output.close() if _._run.poll() != 0: rc = _._run.poll() - msg = [x for x in _._run.stderr.read().decode().split('\n') if x] + msg = [x for x in _._run.stderr.read().split('\n') if x] raise JqError(f'(rc={rc}) {msg}') return _ diff --git a/defl/_logger_.py b/defl/_logger_.py index 7c21202..33e529e 100644 --- a/defl/_logger_.py +++ b/defl/_logger_.py @@ -278,8 +278,8 @@ log = Logger() log.setLevel(lvl=None, tgtMap=sys.stderr) -def depricateWarning(*args): +def depricateWarning(*args, frameBack=1): import inspect funcName = inspect.currentframe().f_back.f_code.co_name - log.warning('Deprication warning', funcName, *args, lineInfo=True, frameBackBefore=1) + log.warning('Deprication warning', funcName, *args, lineInfo=True, frameBackBefore=1 + frameBack) diff --git a/defl/_math_.py b/defl/_math_.py index 8360741..f499de6 100644 --- a/defl/_math_.py +++ b/defl/_math_.py @@ -152,52 +152,6 @@ class BaseNumberSystemMaker: return tot -@dataclass(slots=T, kw_only=T, frozen=F) -class UnitFormat: - units: dict[str, int | float] - fmt: Callable | N = N - - def human(_, mag: int | float) -> tuple[str]: - names = tuple(_.units.keys()) - conv = tuple(_.units.values()) - i = 0 - while i + 1 < len(conv) and (nxt := mag / conv[i]) > 1: - i += 1 - mag = nxt - if _.fmt: - return _.fmt(mag, names[i]) - return (mag, names[i]) - - -ByteUnitFormat = partial(UnitFormat, units={'B': 1024, 'K': 1024, 'M': 1024, 'G': 1024, 'T': 1024, 'P': 1024}) -ByteUnitFormatColor = partial( - UnitFormat, - units={ - '[green]B[/green]': 1024, - '[magenta]K[/magenta]': 1024, - '[yellow]M[/yellow]': 1024, - '[orange]G[/orange]': 1024, - '[red]T[/red]': 1024, - '[white]P[/white]': 1024, - }, -) - -ThousandsUnitFormat = partial( - UnitFormat, units={'': 1000, 'e^1': 1000, 'e^2': 1000, 'e^3': 1000, 'e^4': 1000, 'e^5': 1000} -) -ThousandsUnitFormatColor 
= partial( - UnitFormat, - units={ - '[green][/green]': 1000, - '[magenta]e^1[/magenta]': 1000, - '[yellow]e^2[/yellow]': 1000, - '[orange]e^3[/orange]': 1000, - '[red]e^4[/red]': 1000, - '[white]e^5[/white]': 1000, - }, -) - - def ranges(gap: int, start: int = 0) -> Generator: i = start while T: diff --git a/defl/_networking_.py b/defl/_networking_.py index e3a49f8..f649f68 100644 --- a/defl/_networking_.py +++ b/defl/_networking_.py @@ -1,4 +1,4 @@ -import atexit +import atexit, re from ipaddress import IPv4Address import ipaddress import json @@ -16,10 +16,11 @@ from ._string_ import randomString from ._except_ import tryWrap from ._typing_ import * -from ._typing_ import T, N +from ._typing_ import T, N, F from ._rich_ import * from ipaddress import IPv4Address +from urllib.parse import urlparse, ParseResult regIpAddr = r'\b[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+\b' regIpAddrComp = re.compile(regIpAddr) @@ -154,8 +155,8 @@ def downloadFile(url: str, out: Path, progressStr: str = None): response = requests.get(url, stream=True) size = int(response.headers.get('content-length', 0)) gotten = 0 - deleteOnFile = lambda: out.rm(notExistOk=True) - atexit.register(deleteOnFile) + deleteOnFail = lambda: out.rm(notExistOk=True) + atexit.register(deleteOnFail) with response as r: r.raise_for_status() with out.open('wb') as fp: @@ -165,11 +166,13 @@ def downloadFile(url: str, out: Path, progressStr: str = None): gotten += len(chunk) log.info( f'{progressStr}', - f'{cl.yel}{gotten/1024/1024:,.2f}M{cl.r} / {cl.cyn}{size/1024/1024:,.2f}M{cl.r}', + f'{cl.yel}{gotten/1024/1024:,.2f}M{cl.r}', + '/', + f'{cl.cyn}{size/1024/1024:,.2f}M{cl.r}', end='', ) log.info(cl.res + cl.startOfLine, end='') - atexit.unregister(deleteOnFile) + atexit.unregister(deleteOnFail) def dig( @@ -285,3 +288,85 @@ def pingWait(sendEnv: str | N, sendTo: str, interval: int = 1, waitTime: float = # run.kill('hup') # run.wait() # return Time() - now + + +@dataclass(slots=T, kw_only=T, frozen=F) +class Url: + # todo 
quoting/dequoting https://docs.python.org/3/library/urllib.parse.html#urllib.parse.quote + parseResult: ParseResult = field(kw_only=T, repr=F) + + def __repr__(_) -> str: + items = ['scheme', 'netloc', 'path', 'query', 'params', 'fragment'] + items = {k: getattr(_.parseResult, k) for k in items} + items = {k: v for k, v in items.items() if v} + return ''.join([f'{_.__class__}', '(', ', '.join([f'{k}={v}' for k, v in items.items() if v]), ')']) + + @classmethod + def From(cls, url: str | ParseResult) -> Self: + if not inst(url, ParseResult): + url = urlparse(url) + newNetLoc = re.compile(r'^www\.', flags=re.I).sub('', url.netloc.lower()) + url = url._replace(netloc=newNetLoc) + return cls(parseResult=url) + + @property + def s(_) -> str: + return _.__str__() + + @property + def string(_) -> str: + return _.__str__() + + @property + def url(_) -> str: + return _.__str__() + + def __str__(_) -> str: + return _.parseResult.geturl() + + def clean( + _, + scheme: bool | str = T, + netloc: bool | str = T, + path: bool | str = T, + query: bool | str = T, + params: bool | str = F, + fragment: bool | str = F, + ) -> Self: + if scheme is not T: + _.parseResult._replace(scheme='' if scheme is F else scheme) + if netloc is not T: + _.parseResult._replace(netloc='' if netloc is F else netloc) + if path is not T: + _.parseResult._replace(path='' if path is F else path) + if query is not T: + _.parseResult._replace(query='' if query is F else query) + if params is not T: + _.parseResult._replace(params='' if params is F else params) + if fragment is not T: + _.parseResult._replace(fragment='' if fragment is F else fragment) + return _ + + @property + def scheme(_) -> str: + return _.parseResult.scheme + + @property + def netloc(_) -> str: + return _.parseResult.netloc + + @property + def path(_) -> str: + return _.parseResult.path + + @property + def query(_) -> str: + return _.parseResult.query + + @property + def params(_) -> str: + return _.parseResult.params + + @property + 
def fragment(_) -> str: + return _.parseResult.fragment diff --git a/defl/_path/__init__.py b/defl/_path/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/defl/_path/_assertions_.py b/defl/_path/_assertions_.py new file mode 100644 index 0000000..20edddc --- /dev/null +++ b/defl/_path/_assertions_.py @@ -0,0 +1,80 @@ +import itertools +import json, hashlib +import os +import pathlib +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml +from .._typing_ import * +from .._except_ import * + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +class _DathAssertions: + def assertExists(_) -> Self: + if not _.exists(): + raise DathNotExistsError() + return _ + + def assertIsAbsolute(_) -> Self: + if not _.isAbsolute(): + raise DathNotIsAbsoluteError() + return _ + + def assertIsBlockDevice(_) -> Self: + if not _.isBlockDevice(): + raise DathNotIsBlockDeviceError() + return _ + + def assertIsCharDevice(_) -> Self: + if not _.isCharDevice(): + raise DathNotIsCharDeviceError() + return _ + + def assertIsDir(_) -> Self: + if not _.isDir(): + raise DathNotIsDirError() + return _ + + def assertIsFifo(_) -> Self: + if not _.isFifo(): + raise DathNotIsFifoError() + return _ + + def assertIsFile(_) -> Self: + if not _.isFile(): + raise DathNotIsFileError() + return _ + + def assertIsMount(_) -> Self: + if not _.isMount(): + raise DathNotIsMountError() + return _ + + def assertIsRelativeTo(_) -> Self: + if not _.isRelativeTo(): + raise DathNotIsRelativeToError() + return _ + + def assertIsReserved(_) -> Self: + if not _.isReserved(): + raise DathNotIsReservedError() + return _ + + def assertIsSocket(_) -> Self: + if not _.isSocket(): + raise DathNotIsSocketError() + return _ + + def assertIsSymlink(_) -> Self: + if not _.isSymlink(): + raise DathNotIsSymlinkError() + return _ diff --git a/defl/_path/_depricated_.py 
b/defl/_path/_depricated_.py new file mode 100644 index 0000000..b4582e7 --- /dev/null +++ b/defl/_path/_depricated_.py @@ -0,0 +1,142 @@ +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate 
+#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate 
#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate + +import itertools +import json, hashlib +import os +import pathlib +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml + +from ..thirdParty import jstyleson +from .._microlang_ import MicroLang +from .._ansii_ import cl +from .._basic_ import listFromArgsStar +from .._logger_ import depricateWarning +from .._typing_ import * +from .._string_ import randomString +from .._time_ import Time +from .._jq_ import jloads, JqLoad, JqDump, jdump +from .._shell_ import expandEnvVar +# from ._except_ import * +# from ._pathChown_ import * +# from ._pathFilter_ import * +# from ._pathStatic_ import staticPath, sp, StaticPath +# from ._pathPathlib_ import _DathPathlib +# from ._pathTemp_ import _PathTemp + + +def _PathPatch(Dath: Type): + # == patch onto Dath + # | patch onto Dath to avoid IDE auto resolution + def _tempFile( #!depricate + _, #!depricate + autoRemove: bool = T, #!depricate + create: bool = T, #!depricate + fmt: str = '{d}_{r}', #!depricate + direct: bool = F, #!depricate + ) -> Dath: #!depricate + "#!depricate" + depricateWarning() + p = N #!depricate + while p is none or p.exists(): #!depricate + d = Time(fmt='%Y-%m-%d_%H-%M-%S-%f').toStr() #!depricate + r = randomString(10) #!depricate + p = _._path / fmt.format(d=d, r=r) #!depricate + p = Dath(p) #!depricate + if autoRemove: #!depricate + p.addOnDelFunc(partial(p.remove, recursive=T)) #!depricate + if create: #!depricate + if direct: #!depricate + p.makeDir() #!depricate + else: #!depricate + p.createFile() #!depricate + return p #!depricate + + Dath.tmp = _tempFile #!depricate + Dath.tempFile = _tempFile #!depricate + Dath.temp = _tempFile + + def _appendText(_, data: Any, **kargs) -> Self: + "#!depricated" + if inst(data, bytes | bytearray): #!depricate + mode = 'ab' #!depricate + else: #!depricate + mode = 'a' #!depricate + data = 
str(data) #!depricate + with _._path.open(mode, **kargs) as fp: #!depricate + fp.write(data) #!depricate + fp.flush() + return _ #!depricate + + Dath.appendText = _appendText #!depricate + + def _appendLine(_, data: Any, **kargs): #!depricate + "#!depricated" + _.appendText(f'{data}\n', **kargs) #!depricate + + Dath.appendLine = _appendLine #!depricate + + def _Temp(cls): #!depricate + tmp = Dath('~/tmp/').makeDir() #!depricate + return tmp #!depricate + + Dath.Temp = classmethod(_Temp) + + #!depricate + def _Shm(cls): #!depricate + user = os.environ['USER'] #!depricate + tmp = Dath(f'/dev/shm/{user}').makeDir() #!depricate + return tmp #!depricate + + Dath.Shm = classmethod(_Shm) + + #!depricate + def _FromEnv(cls, var: str, orElse: str | N = N): #!depricate + return cls(os.environ.get(var, orElse)) #!depricate + + Dath.FromEnv = classmethod(_FromEnv) + + Dath.jsonc = Dath.loadJsonc # TODO depricate + Dath.json = Dath.loadJson # TODO depricate + Dath.toml = Dath.loadToml # TODO depricate diff --git a/defl/_path/_except_.py b/defl/_path/_except_.py new file mode 100644 index 0000000..ac5816b --- /dev/null +++ b/defl/_path/_except_.py @@ -0,0 +1,85 @@ +from .._typing_ import * + + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +class DathValueError(Exception): + __slot__ = () + + +class DathNotInitableError(Exception): + __slot__ = () + + +class DathNotExistsError(Exception): + __slot__ = () + + +class DathNotIsAbsoluteError(Exception): + __slot__ = () + + +class DathNotIsBlockDeviceError(Exception): + __slot__ = () + + +class DathNotIsCharDeviceError(Exception): + __slot__ = () + + +class DathNotIsDirError(Exception): + __slot__ = () + + +class DathNotIsFifoError(Exception): + __slot__ = () + + +class DathNotIsFileError(Exception): + __slot__ = () + + +class DathNotIsMountError(Exception): + __slot__ = () + + +class DathNotIsRelativeToError(Exception): + __slot__ = () + + +class DathNotIsReservedError(Exception): + __slot__ = 
() + + +class DathNotIsSocketError(Exception): + __slot__ = () + + +class DathNotIsSymlinkError(Exception): + __slot__ = () + + +class DathValueError(Exception): + __slot__ = () + + +__all__ = ( + 'DathValueError', + 'DathNotInitableError', + 'DathNotExistsError', + 'DathNotIsAbsoluteError', + 'DathNotIsBlockDeviceError', + 'DathNotIsCharDeviceError', + 'DathNotIsDirError', + 'DathNotIsFifoError', + 'DathNotIsFileError', + 'DathNotIsMountError', + 'DathNotIsRelativeToError', + 'DathNotIsReservedError', + 'DathNotIsSocketError', + 'DathNotIsSymlinkError', + 'DathValueError', +) diff --git a/defl/_path/_filter_.py b/defl/_path/_filter_.py new file mode 100644 index 0000000..f58adc0 --- /dev/null +++ b/defl/_path/_filter_.py @@ -0,0 +1,76 @@ +import itertools +import json, hashlib +import os +import pathlib +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml +from .._typing_ import * +from .._microlang_ import MicroLang +from .._except_ import * + + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +@dataclass(slots=T, kw_only=T, frozen=F) +class PathFilter: + # TODO -mount Don't descend directories on other filesystems. 
+ file: bool = F + blockDevice: bool = F + charDevice: bool = F + dir: bool = F + fifo: bool = F + mount: bool = F + reserved: bool = F + socket: bool = F + link: bool = F + test: MicroLang | Iterable | none = N + + def __post_init__(_): + if not (_.file or _.blockDevice or _.charDevice or _.dir or _.fifo or _.mount or _.reserved or _.socket): + _.file = T + _.blockDevice = T + _.charDevice = T + _.dir = T + _.fifo = T + _.mount = T + _.reserved = T + _.socket = T + if _.test and not isinstance(_.test, MicroLang): + _.test = MicroLang(_.test) + + def match(_, path: _Dath | pathlib.Path | str): + if isinstance(path, str): + path = _.__class__(path) + return ( + ( + (_.file and path.is_file()) + or (_.dir and path.is_dir()) + or (_.blockDevice and path.is_block_device()) + or (_.charDevice and path.is_char_device()) + or (_.fifo and path.is_fifo()) + or (_.mount and path.is_mount()) + or (_.reserved and path.is_reserved()) + or (_.socket and path.is_socket()) + ) + and ((not _.link and not path.is_symlink()) or (_.link and path.is_symlink())) + and (not _.test or _.test.eval(path)) + ) + + def filterMany(_, *paths: Iterable, glob: str = N) -> dict: + paths = listFromArgsStar(*paths) + for x in paths: + if glob: + for y in x.glob(glob): + if _.match(y): + yield y + elif _.match(x): + yield x diff --git a/defl/_path/_metaData_.py b/defl/_path/_metaData_.py new file mode 100644 index 0000000..b895185 --- /dev/null +++ b/defl/_path/_metaData_.py @@ -0,0 +1,37 @@ +import itertools +import json, hashlib +import os +import pathlib +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml +from .._typing_ import * +from .._except_ import * +from .._basic_ import DotDict + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +# TODO stat +class _DathMetaData: + def ffprobe(_): + from .._run_ import Run + + cmd = ['ffprobe', '-v', 'quiet', 
'-show_streams', '-print_format', 'json', '-show_format', _] + return DotDict(Run(cmd).log().run(T, T).assSuc().json()) + + def exiftool(_): + from .._run_ import Run + + cmd = ['/usr/bin/vendor_perl/exiftool', '-j', _] + res = Run(cmd).log().run(T, T).assSuc().json() + assert len(res) == 1 + res = DotDict(res[0]) + return res diff --git a/defl/_path/_pathlib_.py b/defl/_path/_pathlib_.py new file mode 100644 index 0000000..a078713 --- /dev/null +++ b/defl/_path/_pathlib_.py @@ -0,0 +1,357 @@ +import itertools +import json, hashlib +import os +import pathlib +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml +from .._typing_ import * +from .._except_ import * + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +class _DathPathlib: + # == pathlib.Path properties + @property + def parts(_) -> List[str]: + return _._path.parts + + @property + def drive(_) -> str: + """ + Path('/a/b/c.d.e.f').drive => '' + Path('a/b/c.d.e.f').drive => '' + """ + return _._path.drive + + @property + def root(_) -> str: + """ + Path('/a/b/c.d.e.f').root => '/' + Path('a/b/c.d.e.f').root => '' + """ + return _._path.root + + @property + def anchor(_) -> str: + """ + Path('/a/b/c.d.e.f').anchor => '/' + Path('a/b/c.d.e.f').anchor => '' + """ + return _._path.anchor + + @property + def parents(_): # -> Generator[_Dath]: + """ + Path('/a/b/c.d.e.f').parents => + Path('a/b/c.d.e.f').parents => + [x for x in _Dath('a/b/c.d.e.f').parents] => [PosixPath('a/b'), PosixPath('a'), PosixPath('.')] + """ + for i in _._path.parents: + yield _.__class__(i) + + @property + def parent(_) -> _Dath: + """ + Path('/a/b/c.d.e.f').parent => Path('/a/b') + Path('a/b/c.d.e.f').parent => Path('a/b') + """ + return _.__class__(_._path.parent) + + @property + def name(_) -> str: + """ + Path('/a/b/c.d.e.f').name => 'c.d.e.f' + Path('a/b/c.d.e.f').name => 'c.d.e.f' + """ + 
return _._path.name + + @property + def suffix(_) -> str: # | extension + """ + _Dath('/a/b/c.d.e.f').suffix => '.f' + _Dath('a/b/c.d.e.f').suffix => '.f' + """ + return _._path.suffix + + @property + def suffixes(_) -> list: + """ + Path('/a/b/c.d.e.f').suffixes => ['.d', '.e', '.f'] + Path('a/b/c.d.e.f').suffixes => ['.d', '.e', '.f'] + """ + return _._path.suffixes + + @property + def suffixesStr(_) -> list: + """ + Path('/a/b/c.d.e.f').suffixes => '.d.e.f'] + Path('a/b/c.d.e.f').suffixes => '.d.e.f'] + """ + return ''.join(_.suffixes) + + @property + def stem(_) -> str: + """ + Path('/a/b/c.d.e.f').stem => 'c.d.e' + Path('a/b/c.d.e.f').stem => 'c.d.e' + """ + return _._path.stem + + @property + def stemBase(_) -> str: + # not part of pathlib + """ + Path('/a/b/c.d.e.f').stem => 'c' + Path('a/b/c.d.e.f').stem => 'c' + """ + return re.sub(r'\..*', '', _._path.name) + + # == pathlib.Path functions + + def absolute(_): + return _.castToPath(_._path.absolute()) + + def as_posix(_): + return _.castToPath(_._path.as_posix()) + + def as_uri(_): + return _.castToPath(_._path.as_uri()) + + def chmod(_, mode, *, follow_symlinks=T): + return _.castToPath(_._path.chmod(mode, follow_symlinks=follow_symlinks)) + + def stat(_, *, follow_symlinks=T): + return _._path.stat(follow_symlinks=follow_symlinks) + + def exists(_) -> bool: + return _._path.exists() + + def expanduser(_): + return _.castToPath(_._path.expanduser()) + + def glob(_, pattern): + for i in _._path.glob(pattern): + yield _.castToPath(i) + + def group(_): + return _.castToPath(_._path.group()) + + def hardlink_to(_, target): + return _.castToPath(_._path.hardlink_to(target)) + + def is_absolute(_) -> bool: + return _._path.is_absolute() + + def is_block_device(_) -> bool: + return _._path.is_block_device() + + def is_char_device(_) -> bool: + return _._path.is_char_device() + + def is_dir(_) -> bool: + return _._path.is_dir() + + def is_fifo(_) -> bool: + return _._path.is_fifo() + + def is_file(_) -> bool: + 
return _._path.is_file() + + def is_mount(_) -> bool: + return _._path.is_mount() + + def is_relative_to(_, *other): + other = [x._path if isinstance(x, _.__class__) else x for x in other] + return _._path.is_relative_to(*other) + + def is_reserved(_) -> bool: + return _._path.is_reserved() + + def is_socket(_) -> bool: + return _._path.is_socket() + + def is_symlink(_) -> bool: + return _._path.is_symlink() + + def iterdir(_): + for i in _._path.iterdir(): + yield _.__class__(i) + + def joinpath(_, *other): + return _.castToPath(_._path.joinpath(*other)) + + def lchmod(_, mode): + return _.castToPath(_._path.lchmod(mode)) + + def link_to(_, target): + if isinstance(target, _.__class__ | pathlib.Path): + target = str(target) + return _.castToPath(_._path.link_to(target)) + + def lstat(_): + return _.castToPath(_._path.lstat()) + + def match(_, pattern): + return _.castToPath(_._path.match(pattern)) + + def mkdir(_, mode=0o700, parents=F, exist_ok=F): + _._path.mkdir(mode=mode, parents=parents, exist_ok=exist_ok) + return _ + + def open(_, mode='r', buffering=-1, encoding=N, errors=N, newline=N): + return _.castToPath( + _._path.open( + mode=mode, + buffering=buffering, + encoding=encoding, + errors=errors, + newline=newline, + ) + ) + + def owner(_): + return _.castToPath(_._path.owner()) + + def read_bytes(_): + return _._path.read_bytes() + + def read_text(_, encoding=N, errors=N): + return _._path.read_text(encoding=encoding, errors=errors) + + def readlink(_): + return _.castToPath(_._path.readlink()) + + def readlinkFull(_): + path = _._path + while path.is_symlink(): + path = path.absolute() + path = path.readlink() + if not path.is_absolute(): + path = (_._path if _._path.is_dir() else _._path.parent) / path + path = path.resolve() + return _.castToPath(path) + + def relative_to(_, *other): + other = [x._path if isinstance(x, _.__class__) else x for x in other] + return _.clone(_._path.relative_to(*other)) + + def rename(_, target): + return 
_.castToPath(_._path.rename(str(target))) + + move = rename + mv = rename + + def replace(_, target): + return _.castToPath(_._path.replace(target)) + + def resolve(_, strict=F): + return _.castToPath(_._path.resolve(strict=strict)) + + def rglob(_, pattern): + raise NotImplementedError() + return _.castToPath(_._path.rglob(pattern)) + + def rmdir(_, logTo: callable = N): + if logTo: + logTo(f'{cl.mag}rmdir{cl.r} {cl.yel}{_._path}{cl.r}') + return _.castToPath(_._path.rmdir()) + + def samefile(_, other_path): + return _.castToPath(_._path.samefile(other_path)) + + def symlink_to( + _, + target, + # Ignored on unix. Required on win. + target_is_directory=F, + ): + target = target._path if isinstance(target, _.__class__) else target + return _.castToPath(_._path.symlink_to(target, target_is_directory=target_is_directory)) + + def touch(_, mode=0o600, exist_ok=T): + _.castToPath(_._path.touch(mode=mode, exist_ok=exist_ok)) + return _ + + def unlink(_, missing_ok=F): + return _.castToPath(_._path.unlink(missing_ok=missing_ok)) + + def with_name(_, name): + return _.castToPath(_._path.with_name(name)) + + def with_stem(_, stem): + return _.castToPath(_._path.with_stem(stem)) + + def with_suffix(_, suffix): + return _.castToPath(_._path.with_suffix(suffix)) + + def write_bytes(_, data): + _._path.write_bytes(data) + return _ + + def write_text(_, data, encoding=N, errors=N, newline=N): + _._path.write_text(data) + return _ + + def __truediv__(_, arg): + return _.clone(_._path / str(arg)) + + # == camel + asPosix = as_posix + asUri = as_uri + hardlinkTo = hardlink_to + isAbsolute = is_absolute + isBlockDevice = is_block_device + isCharDevice = is_char_device + isDir = is_dir + isFifo = is_fifo + isFile = is_file + isMount = is_mount + isRelativeTo = is_relative_to + isReserved = is_reserved + isSocket = is_socket + isSymlink = is_symlink + linkTo = link_to + readBytes = read_bytes + readText = read_text + relativeTo = relative_to + symlinkTo = symlink_to + withName = 
with_name + withStem = with_stem + withSuffix = with_suffix + writeBytes = write_bytes + writeText = write_text + + # == lower + asposix = as_posix + asuri = as_uri + hardlinkto = hardlink_to + isabsolute = is_absolute + isblockdevice = is_block_device + ischardevice = is_char_device + isdir = is_dir + isfifo = is_fifo + isfile = is_file + ismount = is_mount + isrelativeto = is_relative_to + isreserved = is_reserved + issocket = is_socket + issymlink = is_symlink + linkto = link_to + readbytes = read_bytes + readtext = read_text + relativeto = relative_to + symlinkto = symlink_to + withname = with_name + withstem = with_stem + withsuffix = with_suffix + writebytes = write_bytes + writetext = write_text diff --git a/defl/_path/_permission_.py b/defl/_path/_permission_.py new file mode 100644 index 0000000..4cc72a2 --- /dev/null +++ b/defl/_path/_permission_.py @@ -0,0 +1,68 @@ +import itertools +import json, hashlib +import os +import pathlib +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml +from .._typing_ import * +from .._except_ import * + + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +class _DathPermission: + @staticmethod + def ugorwx(ugo, rwx) -> Literal[256, 128, 64, 32, 16, 8, 4, 2, 1]: + match (ugo, rwx): + case ('u', 'r'): + return stat.S_IRUSR + case ('u', 'w'): + return stat.S_IWUSR + case ('u', 'x'): + return stat.S_IXUSR + case ('g', 'r'): + return stat.S_IRGRP + case ('g', 'w'): + return stat.S_IWGRP + case ('g', 'x'): + return stat.S_IXGRP + case ('o', 'r'): + return stat.S_IROTH + case ('o', 'w'): + return stat.S_IWOTH + case ('o', 'x'): + return stat.S_IXOTH + raise DathValueError(f'{ugo=} {rwx=}') + + def chmodUpdate(_, *updates: list[str]): + if not updates: + if _.isFile(): + _._path.chmod(0o600) + elif _.isDir(): + _._path.chmod(0o700) + return + + orig = _.st.st_mode + new = orig + for i in 
updates: + ugo, pm, rwx = i + assert ugo in ['u', 'o', 'g'] + assert rwx in ['r', 'x', 'w'] + assert pm in ['+', '-'] + mode = _.ugorwx(ugo, rwx) + if pm == '+': + new = mode | new + else: + new = mode & new + if new != orig: + _.chmod(new) + return _ diff --git a/defl/_path/_readWrite_.py b/defl/_path/_readWrite_.py new file mode 100644 index 0000000..5f1fa5a --- /dev/null +++ b/defl/_path/_readWrite_.py @@ -0,0 +1,187 @@ +import itertools +import json, hashlib +import os +import pathlib, io +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml +from .._typing_ import * +from .._except_ import * +from ..thirdParty import jstyleson +from .._jq_ import jload, jloads, jdump, jdumps, JqLoad + + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +class LoadCanNotResolveExtensionError(Exception): + __slot__ = () + + +class FileSizeReducedError(Exception): + __slot__ = () + + +class _DathReadWrite: + # == open files + def _encodeData(_, data: Any, joinIter=b'\n'): + if inst(data, Mapping): + return jdumps(data, asBytes=T) + elif inst(data, bytearray | bytes): + return data + elif inst(data, str): + return data.encode('utf8') + elif inst(data, Iterable): + bytz = bytearray() + for item in data: + bytz += _._encodeData(item) + joinIter + return bytz + raise TypeError(str(type(data))) + + def write(_, data: Any) -> Self: + with _._path.open('wb') as fp: + fp.write(_._encodeData(data)) + fp.flush() + return _ + + def writeJson(_, data: Any, **args) -> Self: + with _._path.open('w') as fp: + jdump(data=data, output=fp, **args) + return _ + + def append(_, data: Any, nl: bool = T) -> Self: + with _._path.open('ab') as fp: + fp.write(_._encodeData(data)) + if nl: + fp.write(b'\n') + fp.flush() + return _ + + def readLines(_, strip: str = '\n', removeEmpty: bool = T): + with _._path.open('r') as fp: + while line := fp.readline(): + if strip: + 
line = line.strip(strip) + if removeEmpty and not line.strip(): + continue + yield line + + def readLinesBin(_): + with _._path.open('rb') as fp: + while line := fp.readline(): + yield line + + # == load + def load(_, onNotExist: Any = ..., onLoadFail: Any = ...): + if not _.exists(): + if onNotExist is not ...: + return onNotExist() if callable(onNotExist) else onNotExist + raise FileNotFoundError(f'{_}') + sfx = _.suffix.lower() + func = { + '.json': _.loadJson, + '.jsonl': _.loadJsonl, + '.jsonc': _.loadJsonc, + '.toml': _.loadToml, + '.ini': _.loadIni, + }.get(sfx) + + if not func: + raise LoadCanNotResolveExtensionError(f'suffix={sfx!r}') + + try: + try: + return func() + except json.decoder.JSONDecodeError as e: + # TODO toml jsonc ini csv + if onLoadFail is not ...: + return onLoadFail() if callable(onLoadFail) else onLoadFail + raise e + except Exception as e: + e.add_note(f'function={func}') + raise e + + def loadJson(_): + try: + return jloads(_.readText()) + except json.decoder.JSONDecodeError as e: + e.add_note(str(_)) + raise e + + def loadJsonl(_): + with _.open('rb') as fp: + yield from JqLoad().fromFile(fp) + + def loadJsonc(_): + return jstyleson.loads(_.readText()) + + def loadToml(_): + return toml.loads(_.readText()) + + def loadIni(_): + from configparser import RawConfigParser + + config = RawConfigParser() + config.read(str(_._path)) + dictionary = {} + for section in config.sections(): + dictionary[section] = {} + for option in config.options(section): + dictionary[section][option] = config.get(section, option) + return dictionary + + # == save + def saveJson(_, data) -> N: + raise NotImplementedError('') + _.writeText(json.dumps(data, indent=2)) + + def saveJsonl(_, data) -> N: + raise NotImplementedError('') + with _._path.open('w') as fp: + for i in data: + JqDump(output=fp).dump(i) + + def saveJsonc(_) -> N: + raise NotImplementedError('') + + def saveToml(_) -> N: + raise NotImplementedError('') + + def saveIni(_) -> N: + raise 
NotImplementedError('') + + # == other + def countLines(_) -> int: + totLines = 0 + with _.open('r') as fp: + while fp.readline(): + totLines += 1 + return totLines + + def assertSizeDoesNotReduce(_) -> Generator[int, N, N]: + size = _.size() + while T: + yield size + if (newSize := _.size()) < size: + raise FileSizeReducedError(_) + size = newSize + + def assertSizeDoesNotReduceFp(_, fp: io.FileIO) -> Generator[int, N, N]: + # TODO not thread safe + size = 0 + while T: + cur = fp.tell() + fp.seek(0, 2) + newSize = fp.tell() + fp.seek(cur) + if newSize < size: + raise FileSizeReducedError(_) + size = newSize + yield size diff --git a/defl/_path/_static_.py b/defl/_path/_static_.py new file mode 100644 index 0000000..8ea905b --- /dev/null +++ b/defl/_path/_static_.py @@ -0,0 +1,62 @@ +import itertools +import json, hashlib +import os +import pathlib +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml +from .._typing_ import * +from .._except_ import * + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +@dataclass(slots=T, kw_only=T, frozen=F) +class StaticPath: + _cache: dict = field(default_factory=dict, kw_only=T, repr=T) + + @property + def home(_) -> _Dath: + from .._path_ import Dath as _Dath + + if 'home' not in _._cache: + _._cache['home'] = _Dath(os.environ['HOME']) + return _._cache['home'] + + @property + def tmp(_) -> _Dath: + if 'tmp' not in _._cache: + _._cache['tmp'] = (_.home / 'tmp').makeDir() + return _._cache['tmp'] + + @property + def log(_) -> _Dath: + if 'log' not in _._cache: + _._cache['log'] = (_.home / 'log').makeDir() + return _._cache['log'] + + @property + def shm(_) -> _Dath: + from .._path_ import Dath as _Dath + + if 'shm' not in _._cache: + _._cache['shm'] = _Dath(f'/dev/shm/{user}') + return _._cache['shm'] + + +staticPath = StaticPath() +sp = staticPath + + +def _NotInitable(*args, **kargs): + 
raise DathNotInitableError() + + +StaticPath.__init__ = _NotInitable diff --git a/defl/_path/_temp_.py b/defl/_path/_temp_.py new file mode 100644 index 0000000..6a41c20 --- /dev/null +++ b/defl/_path/_temp_.py @@ -0,0 +1,132 @@ +import itertools +import json, hashlib +import os +import pathlib +import re +import shlex +import shutil +import stat +from contextlib import contextmanager +from dataclasses import dataclass +import base64 +import toml +from .._typing_ import * +from .._except_ import * +from .._time_ import Time +from .._basic_ import Enumer + +_Dath = TypeVar('_Dath') +if TYPE_CHECKING: + from .._path_ import Dath as _Dath + + +class TempPathItem(Enumer): + Name = enum.auto() + Date = enum.auto() + Index = enum.auto() + Ext = enum.auto() + Random = enum.auto() + + +_TPI = TempPathItem + + +class TempPathNoIndexInFmtError(Exception): + __slot__ = () + + +@dataclass(slots=T, kw_only=T, frozen=F) +class TempPath: + path: _Dath + join: str = '_' + + _comp: list[Any] = field(default_factory=list) + + TNI: ClassVar = TempPathItem + TempPathNoIndexInFmtError: ClassVar = TempPathNoIndexInFmtError + fmt: list[str] = field(default_factory=lambda: [_TPI.Name, _TPI.Date, _TPI.Index, _TPI.Ext]) + + def __post_init__(_) -> N: + if _.TNI.Index not in _.fmt: + raise TempPathNoIndexInFmtError() + + _ext: str = N + _name: str = N + + def setName(_, name: Any) -> Self: + if name: + _._name = str(name) + return _ + + def setExt(_, ext: str) -> Self: + if ext: + _._ext = str(ext) + return _ + + def generate(_) -> Generator[_Dath, N, N]: + # TODO except OSError and e.errno==36 # (36, 'File name too long') + index = 0 + while T: + name = [] + for x in _.fmt: + match x: + case TempPathItem.Name: + if name: + name.append('_') + if _._name: + name.append(_._name) + case TempPathItem.Ext: + if _._ext: + if not _._ext.startswith('.'): + name.append('.') + name.append(_._ext) + case TempPathItem.Index: + if index > 0: + if name: + name.append('_') + name.append(f'{index:0>5d}') + 
case TempPathItem.Date: + if name: + name.append('_') + d = Time(fmt=Time.Fmt.ymdhmsNoSpace).toStr() + name.append(d) + case __: + raise NotImplementedError(f'no match for {x}') + yield _.path / ''.join(name) + index += 1 + + @staticmethod + def MakeTempPath( + path: _Dath, # | this is self of `Dath` because this function is called in _PathTemp which is mixin of Dath + name: str | Iterable, + /, + autoRemove: bool, + create: bool, + direct: bool, + ext: str = N, + fmt: list[TempPathItem] = N, + ) -> _Dath: + if not inst(name, str): + name = '_'.join(name) + tmp = TempPath(path=path).setExt(ext).setName(name) + for p in tmp.generate(): + if not p.exists(): + break + p = path.__class__(p) + if autoRemove: + p.addOnDelFunc(partial(p.remove, recursive=T)) + if create: + p.makeDir() if direct else p.createFile() + return p + + +@dataclass(slots=T, kw_only=T, frozen=F) +class _PathTemp: + makeTemp = TempPath.MakeTempPath + + def latestTemp(_, name: str, one: bool = T) -> _Dath | list[_Dath]: + paths = _.glob(name + '_*') # | symmetric with `_.makeTemp(name=...)` + paths = sorted(paths, key=lambda x: x.name) + if one: + paths = paths[-1] if paths else N + return paths diff --git a/defl/_path/_utils_.py b/defl/_path/_utils_.py new file mode 100644 index 0000000..aef69fd --- /dev/null +++ b/defl/_path/_utils_.py @@ -0,0 +1,190 @@ +import atexit +import itertools +import os, shutil +import re +from contextlib import contextmanager +from dataclasses import KW_ONLY, dataclass + +from .._basic_ import flattenNested, Enumer +from .._logger_ import log +from .._path_ import Dath, currentWorkingDir +from .._runCommand_ import CommandFailedException, RunCom +from .._system_ import CommandNotFound, platformResolve +from .._typing_ import * +from .._run_ import Run +from .._rich_ import richReprAuto + + +def which(com: str, raiseOnFail: bool = F) -> Dath: + r = shutil.which(str(com)) + if r: + return Dath(r) + if raiseOnFail: + raise CommandNotFound(com) + + +class 
LockExists(Exception): ... + + +@dataclass(slots=T, kw_only=T, frozen=F) +class LockFile: + # TODO access time older then auto remove + path: Dath = field(kw_only=F, repr=T) + wait: bool + rmOnExit: bool = T + waitLog: bool = T + + def __post_init__(_): + _.path = Dath(_.path) + + def _rmOnExitFun(_): + if _.path.exists(): + _.path.remove() + + def isSet(_): + return _.path.exists() + + def aquire(_, checkInterval: float = 0.5): + while _.path.exists(): + if _.wait: + if _.waitLog: + log.info('waiting for lock file', _.path, tgt='err', end='') + while _.path.exists(): + sleep(checkInterval) + if _.waitLog: + log.info('', tgt='err', end='') + else: + raise LockExists(_.path) + + # print('created lock file', _.path, tgt='err') + _.path.touch() + if _.rmOnExit: + atexit.register(_._rmOnExitFun) + + def remove(_): + # print('removing lock file', _.path, tgt='err') + _.path.remove() + if _.rmOnExit: + atexit.unregister(_._rmOnExitFun) + + __enter__ = aquire + + def __exit__(_, *args, **kargs): + _.remove() + + +@contextmanager +def changeWorkingDir(path): + prevDir = currentWorkingDir() + os.chdir(str(path)) + try: + yield + finally: + os.chdir(str(prevDir.absolute())) + + +@dataclass(slots=T, kw_only=T, frozen=F) +class Find: + pattern: str = field(default='.', kw_only=F, repr=T) + options: list = field(default_factory=list) + + def __post_init__(_) -> None: + _.options.extend(['--color', 'never', '--no-ignore', '--hidden']) + + def run( + _, + *path, + relTo: bool = F, + ): + assert inst(relTo, bool) + assert _.pattern is not None + assert path + if relTo: + assert len(path) == 1 + cmd = ['fd', *_.options, '--base-directory', path[0], '--', _.pattern] + else: + cmd = ['fd', *_.options, '--', _.pattern, *path] + run: Run = Run(cmd).setThreadRead().setLogTo().run(T, T) + while run.outReader.hasMore(): + for line in run.out: + yield Dath(line.decode()) + run.wait().assSuc() + + def setOption(_, key: str, val: Any) -> Self: + _.options.append(f'--{key}') + if val is not 
None: + _.options.append(val) + return _ + + def setPattern(_, val: str) -> Self: + _.pattern = val + return _ + + def setNoHidden(_) -> Self: + if '--hidden' in _.options: + _.options.pop('--hidden') + return _ + + def setIgnore(_) -> Self: + if '--no-ignore' in _.options: + _.options.pop('--no-ignore') + return _ + + def setCaseSensitive(_) -> Self: + return _.setOption('case-sensitive', None) + + def setIgnoreCase(_) -> Self: + return _.setOption('ignore-case', None) + + def setGlob(_) -> Self: + return _.setOption('glob', None) + + def setAbsolutePath(_) -> Self: + return _.setOption('absolute-path', None) + + def setListDetails(_) -> Self: + return _.setOption('list-details', None) + + def setFollow(_) -> Self: + return _.setOption('follow', None) + + def setFullPath(_) -> Self: + return _.setOption('full-path', None) + + def setMaxDepth(_, depth: int) -> Self: + return _.setOption('max-depth', depth) + + def setExclude(_, *pattern: str) -> Self: + for i in pattern: + _.setOption('exclude', i) + return _ + + def setType(_, filetype: str) -> Self: + return _.setOption('type', filetype) + + def setExtension(_, ext: str) -> Self: + return _.setOption('extension', ext) + + def setSize(_, size: str) -> Self: + return _.setOption('size', size) + + def setChangedWithin(_, dateOrDur: str) -> Self: + return _.setOption('changed-within', dateOrDur) + + def setChangedBefore(_, dateOrDur: str) -> Self: + return _.setOption('changed-before', dateOrDur) + + def setOwner(_, userGroup: str) -> Self: + return _.setOption('owner', userGroup) + + def setFormat(_, fmt: str) -> Self: + return _.setOption('format', fmt) + + def setExec(_, cmd: str) -> Self: + return _.setOption('exec', cmd) + + def setExecBatch(_, cmd: str) -> Self: + return _.setOption('exec-batch', cmd) + + def setColor(_, when: str) -> Self: + return _.setOption('color', when) diff --git a/defl/_pathUtils_.py b/defl/_pathUtils_.py index dc91aad..7d2cec4 100644 --- a/defl/_pathUtils_.py +++ b/defl/_pathUtils_.py 
@@ -1,188 +1 @@ -import atexit -import itertools -import os, shutil -import re -from contextlib import contextmanager -from dataclasses import KW_ONLY, dataclass - -from ._basic_ import flattenNested, Enumer -from ._logger_ import log -from ._path_ import Dath, currentWorkingDir -from ._runCommand_ import CommandFailedException, RunCom -from ._system_ import CommandNotFound, platformResolve -from ._typing_ import * -from ._run_ import Run -from ._rich_ import richReprAuto - - -def which(com: str, raiseOnFail: bool = F) -> Dath: - r = shutil.which(str(com)) - if r: - return Dath(r) - if raiseOnFail: - raise CommandNotFound(com) - - -class LockExists(Exception): ... - - -@dataclass(slots=T, kw_only=T, frozen=F) -class LockFile: - # TODO access time older then auto remove - path: Dath = field(kw_only=F, repr=T) - wait: bool - rmOnExit: bool = T - waitLog: bool = T - - def __post_init__(_): - from .inotify_ import INotifyWait - - _.path = Dath(_.path) - - def _rmOnExitFun(_): - if _.path.exists(): - _.path.remove() - - def isSet(_): - return _.path.exists() - - def aquire(_): - while _.path.exists(): - if _.wait: - if _.waitLog: - log.info('waiting for lock file', _.path, tgt='err', end='') - while _.path.exists(): - sleep(0.5) - if _.waitLog: - log.info('', tgt='err', end='') - else: - raise LockExists(_.path) - - # print('created lock file', _.path, tgt='err') - _.path.touch() - if _.rmOnExit: - atexit.register(_._rmOnExitFun) - - def remove(_): - # print('removing lock file', _.path, tgt='err') - _.path.remove() - if _.rmOnExit: - atexit.unregister(_._rmOnExitFun) - - __enter__ = aquire - - def __exit__(_, *args, **kargs): - _.remove() - - -@contextmanager -def changeWorkingDir(path): - prevDir = currentWorkingDir() - os.chdir(str(path)) - try: - yield - finally: - os.chdir(str(prevDir.absolute())) - - -@dataclass(slots=T, kw_only=T, frozen=F) -class Find: - pattern: str = field(default='.', kw_only=F, repr=T) - options: list = field(default_factory=list) - - def 
__post_init__(_) -> None: - _.options.extend(['--color', 'never', '--no-ignore', '--hidden']) - - def run(_, *path, relTo: bool = F): - assert inst(relTo, bool) - assert _.pattern is not None - assert path - if relTo: - assert len(path) == 1 - cmd = ['fd', *_.options, '--base-directory', path[0], '--', _.pattern] - else: - cmd = ['fd', *_.options, '--', _.pattern, *path] - run: Run = Run(cmd).setThreadRead().run(T, T) - while run.outReader.hasMore(): - for line in run.out: - yield Dath(line.decode()) - run.wait().assSuc() - - def setOption(_, key: str, val: Any) -> Self: - _.options.append(f'--{key}') - if val is not None: - _.options.append(val) - return _ - - def setPattern(_, val: str) -> Self: - _.pattern = val - return _ - - def setNoHidden(_) -> Self: - if '--hidden' in _.options: - _.options.pop('--hidden') - return _ - - def setIgnore(_) -> Self: - if '--no-ignore' in _.options: - _.options.pop('--no-ignore') - return _ - - def setCaseSensitive(_) -> Self: - return _.setOption('case-sensitive', None) - - def setIgnoreCase(_) -> Self: - return _.setOption('ignore-case', None) - - def setGlob(_) -> Self: - return _.setOption('glob', None) - - def setAbsolutePath(_) -> Self: - return _.setOption('absolute-path', None) - - def setListDetails(_) -> Self: - return _.setOption('list-details', None) - - def setFollow(_) -> Self: - return _.setOption('follow', None) - - def setFullPath(_) -> Self: - return _.setOption('full-path', None) - - def setMaxDepth(_, depth: int) -> Self: - return _.setOption('max-depth', depth) - - def setExclude(_, *pattern: str) -> Self: - for i in pattern: - _.setOption('exclude', i) - return _ - - def setType(_, filetype: str) -> Self: - return _.setOption('type', filetype) - - def setExtension(_, ext: str) -> Self: - return _.setOption('extension', ext) - - def setSize(_, size: str) -> Self: - return _.setOption('size', size) - - def setChangedWithin(_, dateOrDur: str) -> Self: - return _.setOption('changed-within', dateOrDur) - - def 
setChangedBefore(_, dateOrDur: str) -> Self: - return _.setOption('changed-before', dateOrDur) - - def setOwner(_, userGroup: str) -> Self: - return _.setOption('owner', userGroup) - - def setFormat(_, fmt: str) -> Self: - return _.setOption('format', fmt) - - def setExec(_, cmd: str) -> Self: - return _.setOption('exec', cmd) - - def setExecBatch(_, cmd: str) -> Self: - return _.setOption('exec-batch', cmd) - - def setColor(_, when: str) -> Self: - return _.setOption('color', when) +from ._path._utils_ import * diff --git a/defl/_path_.py b/defl/_path_.py index 6a2f2e3..3f3302c 100644 --- a/defl/_path_.py +++ b/defl/_path_.py @@ -22,211 +22,32 @@ from ._string_ import randomString from ._time_ import Time from ._jq_ import jloads, JqLoad, JqDump, jdump from ._shell_ import expandEnvVar +from ._path._except_ import * +from ._path._filter_ import * +from ._path._static_ import staticPath, sp, StaticPath +from ._path._pathlib_ import _DathPathlib +from ._path._temp_ import _PathTemp +from ._path._depricated_ import _PathPatch +from ._path._assertions_ import _DathAssertions +from ._path._readWrite_ import _DathReadWrite +from ._path._permission_ import _DathPermission +from ._path._metaData_ import _DathMetaData + Dath = TypeVar('Dath') + # TODO path is on same filesystem # TODO _.castToPath to Dath.clone() -class DathValueError(Exception): - __slot__ = () - - -class DathNotInitableError(Exception): - __slot__ = () - - -class DathNotExistsError(Exception): - __slot__ = () - - -class DathNotIsAbsoluteError(Exception): - __slot__ = () - - -class DathNotIsBlockDeviceError(Exception): - __slot__ = () - - -class DathNotIsCharDeviceError(Exception): - __slot__ = () - - -class DathNotIsDirError(Exception): - __slot__ = () - - -class DathNotIsFifoError(Exception): - __slot__ = () - - -class DathNotIsFileError(Exception): - __slot__ = () - - -class DathNotIsMountError(Exception): - __slot__ = () - - -class DathNotIsRelativeToError(Exception): - __slot__ = () - - -class 
DathNotIsReservedError(Exception): - __slot__ = () - - -class DathNotIsSocketError(Exception): - __slot__ = () - - -class DathNotIsSymlinkError(Exception): - __slot__ = () - - -class DathValueError(Exception): - __slot__ = () - - -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -# ◼◼ PATH FILTER -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - -@dataclass(slots=T, kw_only=T, frozen=F) -class PathFilter: - # TODO -mount Don't descend directories on other filesystems. - file: bool = F - blockDevice: bool = F - charDevice: bool = F - dir: bool = F - fifo: bool = F - mount: bool = F - reserved: bool = F - socket: bool = F - link: bool = F - test: MicroLang | Iterable | none = N - - def __post_init__(_): - if not (_.file or _.blockDevice or _.charDevice or _.dir or _.fifo or _.mount or _.reserved or _.socket): - _.file = T - _.blockDevice = T - _.charDevice = T - _.dir = T - _.fifo = T - _.mount = T - _.reserved = T - _.socket = T - if _.test and not isinstance(_.test, MicroLang): - _.test = MicroLang(_.test) - - def match(_, path: Dath | pathlib.Path | str): - if isinstance(path, str): - path = Dath(path) - return ( - ( - (_.file and path.is_file()) - or (_.dir and path.is_dir()) - or (_.blockDevice and path.is_block_device()) - or (_.charDevice and path.is_char_device()) - or (_.fifo and path.is_fifo()) - or (_.mount and path.is_mount()) - or (_.reserved and path.is_reserved()) - or (_.socket and path.is_socket()) - ) - and ((not _.link and not path.is_symlink()) or (_.link and path.is_symlink())) - and (not _.test or _.test.eval(path)) - ) - - def filterMany(_, *paths: Iterable, glob: str = N) -> dict: - paths = listFromArgsStar(*paths) - for x in paths: - if glob: - for y in x.glob(glob): - if _.match(y): - yield y - elif _.match(x): - yield x - - -def ugorwx(ugo, rwx) -> Literal[256, 128, 64, 32, 16, 8, 4, 2, 1]: - match (ugo, rwx): - case ('u', 'r'): - return stat.S_IRUSR - case ('u', 'w'): - return stat.S_IWUSR - case ('u', 'x'): - return 
stat.S_IXUSR - case ('g', 'r'): - return stat.S_IRGRP - case ('g', 'w'): - return stat.S_IWGRP - case ('g', 'x'): - return stat.S_IXGRP - case ('o', 'r'): - return stat.S_IROTH - case ('o', 'w'): - return stat.S_IWOTH - case ('o', 'x'): - return stat.S_IXOTH - raise DathValueError(f'{ugo=} {rwx=}') - - -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -# ◼◼ STATIC PATH -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -@dataclass(slots=T, kw_only=T, frozen=F) -class StaticPath: - _cache: dict = field(default_factory=dict, kw_only=T, repr=T) - - @property - def home(_) -> Dath: - if 'home' not in _._cache: - _._cache['home'] = Dath(os.environ['HOME']) - return _._cache['home'] - - @property - def tmp(_) -> Dath: - if 'tmp' not in _._cache: - _._cache['tmp'] = (_.home / 'tmp').makeDir() - return _._cache['tmp'] - - @property - def log(_) -> Dath: - if 'log' not in _._cache: - _._cache['log'] = (_.home / 'log').makeDir() - return _._cache['log'] - - @property - def shm(_) -> Dath: - if 'shm' not in _._cache: - _._cache['shm'] = Dath(f'/dev/shm/{user}') - return _._cache['shm'] - - -staticPath = StaticPath() -sp = staticPath - - -def NotInitable(*args, **kargs): - raise DathNotInitableError() - - -StaticPath.__init__ = NotInitable - -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -# ◼◼ PATH -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - -class Dath: - __slots__ = ('_path', '_pathStr', '_stat', '_onDel') +class Dath(_DathPathlib, _PathTemp, _DathAssertions, _DathReadWrite, _DathPermission, _DathMetaData): + # TODO use mixin classes to seperate all these functions + __slots__ = ('_path', '_pathStr', '_onDel') def __init__(_, *args): _._path: pathlib.Path = N _._pathStr: str | none = N - _._stat: os.stat_result = N _._onDel: list | N = N # == build path @@ -247,29 +68,6 @@ class Dath: kargs[i] = getattr(_, i) return Dath(*args, **kargs) - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ constructor - # 
◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - @classmethod - def Temp(cls): #!depricate - tmp = Dath('~/tmp/').makeDir() #!depricate - return tmp #!depricate - - @classmethod #!depricate - def Shm(cls): #!depricate - user = os.environ['USER'] #!depricate - tmp = Dath(f'/dev/shm/{user}').makeDir() #!depricate - return tmp #!depricate - - @classmethod #!depricate - def FromEnv(cls, var: str, orElse: str | N = N): #!depricate - return cls(os.environ.get(var, orElse)) #!depricate - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ helper - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - _reduceSlashReg = re.compile(r'(/+)') @staticmethod @@ -284,9 +82,7 @@ class Dath: val = _.clone(val) return val - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ properties - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ + # == properties @property def sp(_) -> StaticPath: @@ -302,9 +98,7 @@ class Dath: @property def st(_): - if not _._stat: - _._stat = _._path.stat() - return _._stat + return _._path.stat() @property def str(_): @@ -336,9 +130,7 @@ class Dath: def isHidden(_): return _.name.startswith('.') - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ functions - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ + # == functions def gitRepoRoot(_) -> Dath: gitRoot = _ while not (gitRoot / '.git').exists(): @@ -409,31 +201,6 @@ class Dath: def FromParts(*parts): return Dath(Dath._fromParts(*parts)) - def chmodUpdate(_, *updates: list[str]): - if not updates: - if _.isFile(): - _._path.chmod(0o600) - elif _.isDir(): - _._path.chmod(0o700) - return - - orig = _.st.st_mode - new = orig - for i in updates: - ugo, pm, rwx = i - assert ugo in ['u', 'o', 'g'] - assert rwx in ['r', 'x', 'w'] - assert pm in ['+', '-'] - mode = ugorwx(ugo, rwx) - if pm == '+': - new = mode | new - else: - new = mode & new - if new != orig: - _.chmod(new) - _._stat = N - return _ - def ls(_, pathFilter: PathFilter = N): return 
(Dath(x) for x in _._path.glob('*') if not pathFilter or pathFilter.filter(x)) @@ -514,30 +281,6 @@ class Dath: reExt = newExt reSuffix = newExt - def tempFile( # TODO rename temp - _, - autoRemove: bool = T, - create: bool = T, - fmt: str = '{d}_{r}', - direct: bool = F, - ) -> Dath: - p = N - while p is none or p.exists(): - d = Time(fmt='%Y-%m-%d_%H-%M-%S-%f').toStr() - r = randomString(10) - p = _._path / fmt.format(d=d, r=r) - p = Dath(p) - if autoRemove: - p.addOnDelFunc(partial(p.remove, recursive=T)) - if create: - if direct: - p.makeDir() - else: - p.createFile() - return p - - temp = tempFile - def setAccModTime(_, access=F, modify=F) -> Self: if access is F: access = _.st.access @@ -550,7 +293,6 @@ class Dath: modify = datetime.now().timestamp() os.utime(_._path, (access, modify)) - _._stat = N return _ def newNameIfExists(_, fmt='{name}_{index:06}{suffixes}') -> Self: @@ -573,167 +315,28 @@ class Dath: """ return _._path.suffix.strip('.') - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ open files - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - def _encodeData(_, data: Any, joinIter=b'\n'): - if inst(data, Mapping): - raise NotImplementedError('Use defl._jq_ module') - elif inst(data, bytearray | bytes): - return data - elif inst(data, str): - return data.encode('utf8') - elif inst(data, Iterable): - bytz = bytearray() - for item in data: - bytz += _._encodeData(item) + joinIter - return bytz - raise TypeError(str(type(data))) - - def write(_, data: Any) -> Self: - with _._path.open('wb') as fp: - fp.write(_._encodeData(data)) - fp.flush() - return _ - - def writeJson(_, data: Any) -> Self: - with _._path.open('w') as fp: - jdump(data=data, output=fp) - return _ - - def append(_, data: Any, nl: bool = T) -> Self: - with _._path.open('ab') as fp: - fp.write(_._encodeData(data)) - if nl: - fp.write(b'\n') - fp.flush() - return _ - - def appendText(_, data: Any, **kargs) -> Self: - "#!depricated" - if inst(data, bytes | 
bytearray): #!depricate - mode = 'ab' #!depricate - else: #!depricate - mode = 'a' #!depricate - data = str(data) #!depricate - with _._path.open(mode, **kargs) as fp: #!depricate - fp.write(data) #!depricate - fp.flush() - return _ #!depricate - - def appendLine(_, data: Any, **kargs): #!depricate - "#!depricated" - _.appendText(f'{data}\n', **kargs) #!depricate - - def readLines(_, strip: str = '\n', removeEmpty: bool = T): - with _._path.open('r') as fp: - while line := fp.readline(): - if strip: - line = line.strip(strip) - if removeEmpty and not line.strip(): - continue - yield line - - def readLinesBin(_): - with _._path.open('rb') as fp: - while line := fp.readline(): - yield line - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ load - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - def load(_): - return { - '.json': _.loadJson, - '.jsonl': _.loadJsonl, - '.jsonc': _.loadJsonc, - '.toml': _.loadToml, - '.ini': _.loadIni, - }[_.suffix.lower()]() - - def loadJson(_): - try: - return jloads(_.readText()) - except json.decoder.JSONDecodeError as e: - e.add_note(str(_)) - raise e - - def loadJsonl(_): - with _.open('rb') as fp: - yield from JqLoad().fromFile(fp) - - def loadJsonc(_): - return jstyleson.loads(_.readText()) - - def loadToml(_): - return toml.loads(_.readText()) - - def loadIni(_): - from configparser import RawConfigParser - - config = RawConfigParser() - config.read(str(_._path)) - dictionary = {} - for section in config.sections(): - dictionary[section] = {} - for option in config.options(section): - dictionary[section][option] = config.get(section, option) - return dictionary - - jsonc = loadJsonc # TODO depricate - json = loadJson # TODO depricate - toml = loadToml # TODO depricate - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ save - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - def saveJson(_, data) -> N: - raise NotImplementedError('') - _.writeText(json.dumps(data, indent=2)) - - def 
saveJsonl(_, data) -> N: - raise NotImplementedError('') - with _._path.open('w') as fp: - for i in data: - JqDump(output=fp).dump(i) - - def saveJsonc(_) -> N: - raise NotImplementedError('') - - def saveToml(_) -> N: - raise NotImplementedError('') - - def saveIni(_) -> N: - raise NotImplementedError('') - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ dunder - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ + # == dunder def toJson(_): return str(_._path) def __repr__(_) -> str: - # TODO use __file__ to put module name - return f'{cl.mag}λ({cl.r}{cl.yel}{_._path.parent}{cl.r}/{cl.cyn}{_._path.name}{cl.r}{cl.mag}){cl.r}' + return f'λ({_._path})' def __str__(_) -> str: return str(_._path) def __hash__(_) -> str: - return _._path.__hash__() + return hash(_._path) def __del__(_): - if _._onDel: - for i in _._onDel: - i() - del _._path + if hasattr(_, '_onDel'): + if _._onDel: + for i in _._onDel: + i() + del _._path - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ parts - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ + # == parts @staticmethod def _normalizeFromComponents(*args) -> str: @@ -750,15 +353,14 @@ class Dath: # args = args.rstrip('/') # DONT DO THIS bad if '/' return args + # TODO move component to new path? 
@staticmethod def _fromParts(*args) -> pathlib.Path: args = [str(x) if isinstance(x, int) else x for x in args] a = Dath._normalizeFromComponents(*args) return pathlib.Path(a) - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ regex minipulate - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ + # == regex minipulate def regFindIter(_, reg: LiteralString | re.Pattern, /) -> list[re.Match]: if not isinstance(reg, re.Pattern): @@ -776,9 +378,7 @@ class Dath: reg = re.compile(reg) return reg.search(_.str) - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ force create - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ + # == force create def mkdirForce(_, mode=N): _.mkdir(**({'mode': mode} if mode else {}), parents=T, exist_ok=T) @@ -795,10 +395,9 @@ class Dath: target = str(target) _.symlink_to(target=target) - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ find - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ + # == find + # TODO depricate? @staticmethod def _subPathGen( subPath: Dath, @@ -823,6 +422,7 @@ class Dath: ): yield i + # TODO depricate? 
def find( _, pathFilter: MicroLang | Callable = N, @@ -853,485 +453,31 @@ class Dath: ): yield i - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ commands - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # from defl._runCommand_ import RunCom - RunComReturn = NewType('defl.RunCom', str) # defl.RunCom - - def execute(_, com, *args, **kargs) -> RunComReturn: - import defl - - if 'pipe' not in kargs: - kargs['pipe'] = false - l = [defl.which(com), _._path] - l.extend(args) - return defl.RunCom(l, **kargs) - - def vim(_, *args, startInInsert=F, **kargs) -> RunComReturn: - if startInInsert: - args += ('-c', 'startinsert') - return _.execute('vim', *args) - - def bash(_, *args, **kargs) -> RunComReturn: - pass - - return _.execute('bash', *args) - - def python(_, *args, **kargs) -> RunComReturn: - pass - - return _.execute('python', *args) - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ pathlib.Path properties - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - @property - def parts(_) -> List[str]: - return _._path.parts - - @property - def drive(_) -> str: - """ - Dath('/a/b/c.d.e.f').drive => '' - Dath('a/b/c.d.e.f').drive => '' - """ - return _._path.drive - - @property - def root(_) -> str: - """ - Dath('/a/b/c.d.e.f').root => '/' - Dath('a/b/c.d.e.f').root => '' - """ - return _._path.root - - @property - def anchor(_) -> str: - """ - Dath('/a/b/c.d.e.f').anchor => '/' - Dath('a/b/c.d.e.f').anchor => '' - """ - return _._path.anchor - - @property - def parents(_): # -> Generator[Dath]: - """ - Dath('/a/b/c.d.e.f').parents => - Dath('a/b/c.d.e.f').parents => - [x for x in Dath('a/b/c.d.e.f').parents] => [PosixPath('a/b'), PosixPath('a'), PosixPath('.')] - """ - for i in _._path.parents: - yield Dath(i) - - @property - def parent(_) -> Dath: - """ - Dath('/a/b/c.d.e.f').parent => Dath('/a/b') - Dath('a/b/c.d.e.f').parent => Dath('a/b') - """ - return Dath(_._path.parent) - - @property - def name(_) -> str: - """ - 
Dath('/a/b/c.d.e.f').name => 'c.d.e.f' - Dath('a/b/c.d.e.f').name => 'c.d.e.f' - """ - return _._path.name - - @property - def suffix(_) -> str: # | extension - """ - Dath('/a/b/c.d.e.f').suffix => '.f' - Dath('a/b/c.d.e.f').suffix => '.f' - """ - return _._path.suffix - - @property - def suffixes(_) -> list: - """ - Dath('/a/b/c.d.e.f').suffixes => ['.d', '.e', '.f'] - Dath('a/b/c.d.e.f').suffixes => ['.d', '.e', '.f'] - """ - return _._path.suffixes - - @property - def suffixesStr(_) -> list: - """ - Dath('/a/b/c.d.e.f').suffixes => '.d.e.f'] - Dath('a/b/c.d.e.f').suffixes => '.d.e.f'] - """ - return ''.join(_.suffixes) - - @property - def stem(_) -> str: - """ - Dath('/a/b/c.d.e.f').stem => 'c.d.e' - Dath('a/b/c.d.e.f').stem => 'c.d.e' - """ - return _._path.stem - - @property - def stemBase(_) -> str: - # not part of pathlib - """ - Dath('/a/b/c.d.e.f').stem => 'c' - Dath('a/b/c.d.e.f').stem => 'c' - """ - return re.sub(r'\..*', '', _._path.name) - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ pathlib.Path functions - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - def absolute(_): - return _.castToPath(_._path.absolute()) - - def as_posix(_): - return _.castToPath(_._path.as_posix()) - - def as_uri(_): - return _.castToPath(_._path.as_uri()) - - def chmod(_, mode, *, follow_symlinks=T): - return _.castToPath(_._path.chmod(mode, follow_symlinks=follow_symlinks)) - - def stat(_, *, follow_symlinks=T): - return _._path.stat(follow_symlinks=follow_symlinks) - - def exists(_) -> bool: - return _._path.exists() - - def expanduser(_): - return _.castToPath(_._path.expanduser()) - - def glob(_, pattern): - for i in _._path.glob(pattern): - yield _.castToPath(i) - - def group(_): - return _.castToPath(_._path.group()) - - def hardlink_to(_, target): - return _.castToPath(_._path.hardlink_to(target)) - - def is_absolute(_) -> bool: - return _._path.is_absolute() - - def is_block_device(_) -> bool: - return _._path.is_block_device() - - def 
is_char_device(_) -> bool: - return _._path.is_char_device() - - def is_dir(_) -> bool: - return _._path.is_dir() - - def is_fifo(_) -> bool: - return _._path.is_fifo() - - def is_file(_) -> bool: - return _._path.is_file() - - def is_mount(_) -> bool: - return _._path.is_mount() - - def is_relative_to(_, *other): - other = [x._path if isinstance(x, Dath) else x for x in other] - return _._path.is_relative_to(*other) - - def is_reserved(_) -> bool: - return _._path.is_reserved() - - def is_socket(_) -> bool: - return _._path.is_socket() - - def is_symlink(_) -> bool: - return _._path.is_symlink() - - def iterdir(_): - for i in _._path.iterdir(): - yield Dath(i) - - def joinpath(_, *other): - return _.castToPath(_._path.joinpath(*other)) - - def lchmod(_, mode): - return _.castToPath(_._path.lchmod(mode)) - - def link_to(_, target): - if isinstance(target, Dath | pathlib.Path): - target = str(target) - return _.castToPath(_._path.link_to(target)) - - def lstat(_): - return _.castToPath(_._path.lstat()) - - def match(_, pattern): - return _.castToPath(_._path.match(pattern)) - - def mkdir(_, mode=0o700, parents=F, exist_ok=F): - _._path.mkdir(mode=mode, parents=parents, exist_ok=exist_ok) - return _ - - def open(_, mode='r', buffering=-1, encoding=N, errors=N, newline=N): - return _.castToPath( - _._path.open( - mode=mode, - buffering=buffering, - encoding=encoding, - errors=errors, - newline=newline, - ) - ) - - def owner(_): - return _.castToPath(_._path.owner()) - - def read_bytes(_): - return _._path.read_bytes() - - def read_text(_, encoding=N, errors=N): - return _._path.read_text(encoding=encoding, errors=errors) - - def readlink(_): - return _.castToPath(_._path.readlink()) - - def readlinkFull(_): - path = _._path - while path.is_symlink(): - path = path.absolute() - path = path.readlink() - if not path.is_absolute(): - path = (_._path if _._path.is_dir() else _._path.parent) / path - path = path.resolve() - return _.castToPath(path) - - def relative_to(_, 
*other): - other = [x._path if isinstance(x, Dath) else x for x in other] - return _.clone(_._path.relative_to(*other)) - - def rename(_, target): - return _.castToPath(_._path.rename(str(target))) - - move = rename - mv = rename - - def replace(_, target): - return _.castToPath(_._path.replace(target)) - - def resolve(_, strict=F): - return _.castToPath(_._path.resolve(strict=strict)) - - def rglob(_, pattern): - raise NotImplementedError() - return _.castToPath(_._path.rglob(pattern)) - - def rmdir(_, logTo: callable = N): - if logTo: - logTo(f'{cl.mag}rmdir{cl.r} {cl.yel}{_._path}{cl.r}') - return _.castToPath(_._path.rmdir()) - - def samefile(_, other_path): - return _.castToPath(_._path.samefile(other_path)) - - def symlink_to( - _, - target, - # Ignored on unix. Required on win. - target_is_directory=F, - ): - target = target._path if isinstance(target, Dath) else target - return _.castToPath(_._path.symlink_to(target, target_is_directory=target_is_directory)) - - def touch(_, mode=0o600, exist_ok=T): - _.castToPath(_._path.touch(mode=mode, exist_ok=exist_ok)) - return _ - - def unlink(_, missing_ok=F): - return _.castToPath(_._path.unlink(missing_ok=missing_ok)) - - def with_name(_, name): - return _.castToPath(_._path.with_name(name)) - - def with_stem(_, stem): - return _.castToPath(_._path.with_stem(stem)) - - def with_suffix(_, suffix): - return _.castToPath(_._path.with_suffix(suffix)) - - def write_bytes(_, data): - _._path.write_bytes(data) - return _ - - def write_text(_, data, encoding=N, errors=N, newline=N): - _._path.write_text(data) - return _ - - def __truediv__(_, arg): - return _.clone(_._path / str(arg)) - - # == camel - asPosix = as_posix - asUri = as_uri - hardlinkTo = hardlink_to - isAbsolute = is_absolute - isBlockDevice = is_block_device - isCharDevice = is_char_device - isDir = is_dir - isFifo = is_fifo - isFile = is_file - isMount = is_mount - isRelativeTo = is_relative_to - isReserved = is_reserved - isSocket = is_socket - isSymlink 
= is_symlink - linkTo = link_to - readBytes = read_bytes - readText = read_text - relativeTo = relative_to - symlinkTo = symlink_to - withName = with_name - withStem = with_stem - withSuffix = with_suffix - writeBytes = write_bytes - writeText = write_text - - # == lower - asposix = as_posix - asuri = as_uri - hardlinkto = hardlink_to - isabsolute = is_absolute - isblockdevice = is_block_device - ischardevice = is_char_device - isdir = is_dir - isfifo = is_fifo - isfile = is_file - ismount = is_mount - isrelativeto = is_relative_to - isreserved = is_reserved - issocket = is_socket - issymlink = is_symlink - linkto = link_to - readbytes = read_bytes - readtext = read_text - relativeto = relative_to - symlinkto = symlink_to - withname = with_name - withstem = with_stem - withsuffix = with_suffix - writebytes = write_bytes - writetext = write_text - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ comparison overrides - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - + # == comparison def __lt__(_, k): - return _._path.__lt__(_toPathlib(k)) + return _._path.__lt__(_.__class__(k)._path) def __eq__(_, k): - return _._path.__eq__(_toPathlib(k)) + # print(type(_)) + # print(_.__slots__) + return _._path.__eq__(_.__class__(k)._path) def __le__(_, k): - return _._path.__le__(_toPathlib(k)) + return _._path.__le__(_.__class__(k)._path) def __gt__(_, k): - return _._path.__gt__(_toPathlib(k)) + return _._path.__gt__(_.__class__(k)._path) def __ge__(_, k): - return _._path.__ge__(_toPathlib(k)) - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ Assertions - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - def assertExists(_) -> Self: - if not _.exists(): - raise DathNotExistsError() - return _ - - def assertIsAbsolute(_) -> Self: - if not _.isAbsolute(): - raise DathNotIsAbsoluteError() - return _ - - def assertIsBlockDevice(_) -> Self: - if not _.isBlockDevice(): - raise DathNotIsBlockDeviceError() - return _ - - def 
assertIsCharDevice(_) -> Self: - if not _.isCharDevice(): - raise DathNotIsCharDeviceError() - return _ - - def assertIsDir(_) -> Self: - if not _.isDir(): - raise DathNotIsDirError() - return _ - - def assertIsFifo(_) -> Self: - if not _.isFifo(): - raise DathNotIsFifoError() - return _ - - def assertIsFile(_) -> Self: - if not _.isFile(): - raise DathNotIsFileError() - return _ - - def assertIsMount(_) -> Self: - if not _.isMount(): - raise DathNotIsMountError() - return _ - - def assertIsRelativeTo(_) -> Self: - if not _.isRelativeTo(): - raise DathNotIsRelativeToError() - return _ - - def assertIsReserved(_) -> Self: - if not _.isReserved(): - raise DathNotIsReservedError() - return _ - - def assertIsSocket(_) -> Self: - if not _.isSocket(): - raise DathNotIsSocketError() - return _ - - def assertIsSymlink(_) -> Self: - if not _.isSymlink(): - raise DathNotIsSymlinkError() - return _ + return _._path.__ge__(_.__class__(k)._path) -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -# ◼◼ DEPRICATE -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ +_PathPatch(Dath) # | patch onto Dath to avoid IDE auto resolution - -def _toPathlib(path): #!depricate - if isinstance(path, Dath): #!depricate - return path._path #!depricate - elif isinstance(path, str): #!depricate - return pathlib.Path(path) #!depricate - elif isinstance(path, pathlib.Path): #!depricate - return path #!depricate - else: #!depricate - raise DathValueError(f'Can not cast to path {path}') #!depricate - - -def globPath(path: str | Dath) -> list[Dath]: #!depricate - depricateWarning('move to lazy class') #!depricate - return [x for x in Dath(path).resolveFull()] #!depricate - - -def globPathGen(path: str | Dath) -> Iterable[Dath]: #!depricate - depricateWarning('move to lazy class') #!depricate - return Dath(path).resolveFull() #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate 
#!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate +#!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate #!depricate def currentWorkingDir(): #!depricate diff --git a/defl/_run_.py b/defl/_run_.py index 877ebe9..0576b75 100644 --- a/defl/_run_.py +++ b/defl/_run_.py @@ -306,7 +306,7 @@ class Run: return _ # == run flow : run - def writeLog(_) -> Self: + def writeLog(_, dry: bool = F) -> Self: logTo = N logText = N if _.logTo is U or _.logTo is T: @@ -319,15 +319,17 @@ class Run: logText = _.cmd if _.logText and logText: logText = f'{cl.grn}[{cl.r}{_.logText}{cl.grn}]{cl.r} {logText}' + if logText and dry: + logText += f' # {cl.red}DRY RUN{cl.r}' if logTo and logText: logTo(logText) return _ - def run(_, out, err, inp: bool = F) -> Self: + def run(_, out, err, inp: bool = F, dry: bool = F) -> Self: _.assertNotRan() _._ran = T - _.writeLog() + _.writeLog(dry=dry) out = Run.pipeResolve(out) err = Run.pipeResolve(err) @@ -335,7 +337,14 @@ class Run: # print({'cmd': _.cmd, 'popen': _.popenKW.args(), 'stdout': out, 'stderr': err, 'stdin': inp}) # print(_.cmd, _.popenKW.args()) - _._popen = Popen(_.cmd, **_.popenKW.args(), stdout=out, stderr=err, stdin=inp, **_._supressSigInt) + _._popen = Popen( + _.cmd if not dry else ['true'], # | dry run just execute `true` + **_.popenKW.args(), + stdout=out, + stderr=err, + stdin=inp, + **_._supressSigInt, + ) if out is PIPE: _._out = AutoReader(_._popen.stdout, threadMode=_._threadedRead) @@ -395,15 +404,15 @@ class Run: if not _.success: errStr = '\n'.join( [ - f'{cl.red}{_.rc}{cl.r}', - f' # {cl.yel}{_._origCmd}{cl.r}', - f' # {cl.org}{_.cmdStr.encode()}{cl.r}', + f'# {cl.wht}returncode{cl.r}={cl.red}{_.rc}{cl.r}', + f'# {cl.yel}{_._origCmd}{cl.r}', + f'# {cl.org}{_.cmdStr.encode()}{cl.r}', ] ) byz = [] byz += [x for x in _.out] if _._out and out else [] byz += [x for x in 
_.err] if _._err and err else [] - errStr += '\n' + cl.red + ((b' | ' if byz else b'') + b'\n | '.join(byz)).decode() + cl.r + errStr += '\n' + cl.red + ((b'| ' if byz else b'') + b'\n | '.join(byz)).decode() + cl.r e = RunReturnCodeError(errStr) e.msg = byz raise e @@ -411,6 +420,12 @@ class Run: assSuc = assertSuccess + def printErrIfFail(_): + try: + _.assertSuccess() + except RunReturnCodeError as e: + log.info(str(e), t='e') + # | ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ # | ◼◼ Data # | ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ @@ -509,6 +524,7 @@ class Run: _, sig: KillSig, # | defl.KillSigList.SigInt logInfo: bool = T, + wait: bool = T, ): from .proc_ import Kill, KillSig # | lazy @@ -517,7 +533,7 @@ class Run: if _._popen.poll() is N: if logInfo: log.info(f'{cl.red}kill{cl.r} -s {cl.mag}sig{sig}{cl.r} {cl.cyn}{_._popen.pid}{cl.r}', tgt='err') - Kill(signal=sig, sudo=T).kill(_._popen.pid) + Kill(signal=sig, sudo=T).kill(_._popen.pid, wait=wait) return _ # == process diff --git a/defl/_shell_.py b/defl/_shell_.py index a64a2ab..7e3fcb1 100644 --- a/defl/_shell_.py +++ b/defl/_shell_.py @@ -180,6 +180,8 @@ class ShellQuote: # @property # def string(_) -> str: # return str(_) + def toJson(_) -> str: + return _.cliJoin ShQu = ShellQuote diff --git a/defl/_string_.py b/defl/_string_.py index aef2946..c470142 100644 --- a/defl/_string_.py +++ b/defl/_string_.py @@ -45,7 +45,7 @@ def dictToStr(d: dict, k='', v='', join: str = ' ', joinItem: str = '=') -> str return d -def colorDict(d: dict, k=cl.mag, v=cl.cyn, join: str = N, joinItem: str = '=') -> str | dict[str, str]: +def colorDict(d: dict, k=cl.yel, v=cl.cyn, join: str = N, joinItem: str = '{cl.grn}=') -> str | dict[str, str]: return dictToStr(d=d, k=k, v=v, join=join, joinItem=joinItem) diff --git a/defl/_thread_.py b/defl/_thread_.py index 3da4080..1889418 100644 --- a/defl/_thread_.py +++ b/defl/_thread_.py @@ -99,14 +99,17 @@ class ThreadLock: def __post_init__(_) -> N: _.unlock() - 
def lock(_) -> N: + def lock(_) -> Self: _._ready.clear() + return _ - def wait(_) -> N: - _._ready.wait(timeout=N) + def wait(_, timeout: int | N = N) -> Self: + _._ready.wait(timeout=timeout) + return _ - def unlock(_) -> N: + def unlock(_) -> Self: _._ready.set() + return _ def __enter__(_): _._ready.wait(timeout=N) diff --git a/defl/_timeUtils_.py b/defl/_timeUtils_.py index 0480904..0b0b1db 100644 --- a/defl/_timeUtils_.py +++ b/defl/_timeUtils_.py @@ -64,7 +64,7 @@ class ETA: percComp: float, secOverride: int = Undefined, # | `Undefined` will lap the timer, `None` skips average, `int` overrides timer.lap() ) -> TimeDelta: - assert percComp <= 1 and percComp >= 0, percComp + # assert percComp <= 1 and percComp >= 0, percComp percComp = 0.000001 if percComp == 0 else percComp if _.timer is not None and secOverride is not None: _.rollAvg.add(_.timer.lap().seconds if secOverride is Undefined else secOverride) @@ -98,6 +98,8 @@ class ProgressPrint: def __post_init__(_): if isinstance(_.completeAmt, Collection): _.completeAmt = len(_.completeAmt) + if not _.eta: + _.eta = N def inc( _, diff --git a/defl/_time_.py b/defl/_time_.py index 8d72f6b..1a13cf3 100644 --- a/defl/_time_.py +++ b/defl/_time_.py @@ -319,6 +319,7 @@ class TimeFormat(Enumer): time = '%H:%M:%S' pretty = '%a %d %b %y %H:%M:%S' prettyDay = '%a %d %b %y' + ymdhmsfDash = '%Y-%m-%d-%H-%M-%S-%f' @dataclass(slots=True, kw_only=False, frozen=False) @@ -330,7 +331,12 @@ class Time: LocalTz: ClassVar = TimeZone(TimeZone.localOffset()) tz: TimeZone = Undefined autoAddTzStr: bool = True # only effects __str__ + TZ: ClassVar[Type] = TimeZone + TD: ClassVar[Type] = TimeDelta + TU: ClassVar[Type] = TimeUnit + # TODO when getting dt the timezone is TimeZone and not timezone.utc type date = date.replace(tzinfo=timezone.utc) + # TODO so dataframes fail on TimeZone type Fmt = TimeFormat __add__ = Obj.__add__ diff --git a/defl/fileTypes_.py b/defl/fileTypes_.py new file mode 100755 index 0000000..b47bdd9 --- 
/dev/null +++ b/defl/fileTypes_.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python + +from collections import Counter +import sys, re, os, enum, itertools +from functools import partial, partialmethod +from time import sleep +from dataclasses import dataclass, field +from operator import itemgetter +from defl import log, cl, Dath, Time, Run, sp +from defl._typing_ import * +from defl._typing_ import T, F, N, U +from defl._rich_ import * +from defl._pydantic_ import * +import defl + + +class Type(defl.Enumer): + Video = enum.auto() + Audio = enum.auto() + Text = enum.auto() + Dir = enum.auto() + + +fileTypeToExt = { + Type.Video: [ + 'mp4', + 'avi', + 'mkv', + 'mov', + 'wmv', + 'm4v', + 'gif', + '3g2', + 'mpg', + '3gpp', + 'mod', + 'mov', + '3gp', + 'rmvb', + 'm2ts', + 'webm', + 'mts', + ], + Type.Audio: [ + 'mp3', + 'flac', + 'm4a', + 'wav', + 'webm', + 'aiff', + 'wma', + 'opus', + 'aac', + ], + Type.Text: [ + 'log', + 'sh', + 'txt', + 'md', + ], +} + + +@dataclass(slots=T, kw_only=T) +class FileType: + def ext(_, path: Dath) -> list[Type]: + if path.isDir(): + return [Type.Dir] + ext = path.suffix.strip('.').lower() + match = [k for k, v in fileTypeToExt.items() if ext in v] + if defl.amCliEntryPoint(): + log.info(' '.join(match)) + return match + + def exts(_, *paths: Dath) -> dict[Dath, list[Type]]: + res = {x: _.ext(x) for x in paths} + if defl.amCliEntryPoint(): + log.info(lambda jc: res) + return res + + def isType(_, path: Dath, typ: Type, only: bool = F) -> bool: + if only: + r = _.ext(path=path) == [typ] + else: + r = typ in _.ext(path=path) + if defl.amCliEntryPoint(): + sys.exit(0 if r else 1) + return r + + def isVideo(_, path: Dath, only: bool = F) -> bool: + r = _.isType(typ=Type.Video, path=path, only=only) + if defl.amCliEntryPoint(): + sys.exit(0 if r else 1) + return r + + def isAudio(_, path: Dath, only: bool = F) -> bool: + r = _.isType(typ=Type.Audio, path=path, only=only) + if defl.amCliEntryPoint(): + sys.exit(0 if r else 1) + return r + + def 
isText(_, path: Dath, only: bool = F) -> bool: + r = _.isType(typ=Type.Text, path=path, only=only) + if defl.amCliEntryPoint(): + sys.exit(0 if r else 1) + return r + + +# TODO make cli file in bashtools +# if __name__ == '__main__': +# with defl.suppressStackTraceCM(defl.ArgFromObjError): +# defl.ParseObj(CLI, mapSubParsers={'-': 'ext'}).parse(sys.argv[1:]) diff --git a/defl/lsblk_.py b/defl/lsblk_.py index 62ef0a1..8596367 100644 --- a/defl/lsblk_.py +++ b/defl/lsblk_.py @@ -4,8 +4,9 @@ # ` udevadm info --query=all --name=/dev/sdb from dataclasses import dataclass, field +import dataclasses -from defl import Path, Obj, Run, Dath +from defl import Path, Obj, Run, Dath, log from defl._typing_ import * import defl from defl._pydantic_ import * @@ -14,15 +15,15 @@ from defl._pydantic_ import * @dataclass(slots=T, kw_only=T, frozen=F) class DiskTree: root: Path | N - byId: N | list[Path] = N - byPath: N | list[Path] = N - byDiskseq: N | list[Path] = N - byPartlabel: N | list[Path] = N - byUuid: N | list[Path] = N - byPartuuid: N | list[Path] = N - byLoopInode: N | list[Path] = N - byLoopRef: N | list[Path] = N - byLabel: N | list[Path] = N + byId: N | list[Path] = field(default_factory=list) + byPath: N | list[Path] = field(default_factory=list) + byDiskseq: N | list[Path] = field(default_factory=list) + byPartlabel: N | list[Path] = field(default_factory=list) + byUuid: N | list[Path] = field(default_factory=list) + byPartuuid: N | list[Path] = field(default_factory=list) + byLoopInode: N | list[Path] = field(default_factory=list) + byLoopRef: N | list[Path] = field(default_factory=list) + byLabel: N | list[Path] = field(default_factory=list) def has(_, path: Path): assert inst(path, Path) @@ -48,206 +49,213 @@ class DiskTree: if a.isEmptyLeaf(res, name): a[res, name] = [] a[res, name].append(path) - return {k: DiskTree(root=k, **v) for k, v in a.items()} + return {k: DiskTree(root=k, **v) for k, v in a.data.items()} toJson = Obj.toJson +LsblkCollection = 
TypeVar('LsblkCollection') +Lsblk = TypeVar('Lsblk') + + @dataclass(slots=T, kw_only=T, frozen=F) class Lsblk: + _diskTree: DiskTree | N = N + _meta: dict = field(default_factory=dict, kw_only=T, repr=T) + _keep: bool = T # | hidden attribute which filters out items when descending + _parent: Self | N = N + + @property + def parent(_) -> Self: + return _._parent + def toJson(_, keys: list | str = None) -> dict: if not keys: - keys = ['path', 'typee', 'mountpoint', 'fstype', 'children'] - elif keys[0] == 'all': + keys = ['path', 'typee', 'mountpoint', 'fstype'] + elif keys[0] == 'all' or keys == 'all': keys = _.__slots__ - d = {k: getattr(_, k) for k in keys} - if 'children' in d: - d['children'] = [x.toJson(keys=keys) for x in d['children']] - return d + keep = ['_keep'] if not _._keep and '_keep' not in keys else [] + return {k: _[k] for k in keys + keep} + + @classmethod + def FromData(cls, data: dict) -> Self: + assert inst(data, dict) + data['path'] = Path(data['path']) + data['fsroots'] = [x for x in data['fsroots'] if x] + data['mountpoints'] = [x for x in data['mountpoints'] if x] + children = data.get('children', []) + data['children'] = [] + data = {k: data[v] for k, v in cls.Translation.items()} + parent = cls(**data) + children = [cls.FromData(data=x) for x in children] + for child in children: + child._parent = parent + parent.children = children + return parent def __str__(_) -> str: return str(_.toJson()) - @classmethod - def Run(cls, *devs) -> 'LsblkCollection': - return LsblkCollection(devs=devs) - def __iter__(_) -> defl.Generator[Self, N, N]: yield _ for i in _.children: yield i - # == repr - path: Path = field(repr=T) - typee: str = field(repr=T) - mountpoint: str | N = field(repr=T) - fstype: str | N = field(repr=T) - children: list[Self] = field(repr=T) + def __getitem__(_, k): + if inst(k, list | tuple): + return _.toJson(keys=k) + if hasattr(_, k): + return getattr(_, k) + elif k in Lsblk.TranslationBackwords: + return getattr(_, 
Lsblk.TranslationBackwords[k]) + elif k in _._meta: + return _._meta[k] + getattr(_, k) # | raise AttributeError() + raise NotImplementedError('dont get here') - # == default - name: str = field(repr=F) - alignment: int = field(repr=F) - idLink: str | N = field(repr=F) - idd: str | N = field(repr=F) - discAln: int = field(repr=F) - dax: bool = field(repr=F) - discGran: str = field(repr=F) - diskSeq: int = field(repr=F) - discMax: str = field(repr=F) - discZero: bool = field(repr=F) - fsavail: str | N = field(repr=F) - fsroots: list = field(repr=F) - fssize: str | N = field(repr=F) - fsused: str | N = field(repr=F) - fsusePerc: str | N = field(repr=F) - fsver: str | N = field(repr=F) - group: str = field(repr=F) - hctl: str | N = field(repr=F) - hotplug: bool = field(repr=F) - kname: str = field(repr=F) - label: str | N = field(repr=F) - logSec: int = field(repr=F) - majMin: str = field(repr=F) - maj: str = field(repr=F) - minn: str = field(repr=F) - minIo: int = field(repr=F) - mode: str = field(repr=F) - model: str | N = field(repr=F) - mq: str = field(repr=F) - optIo: int = field(repr=F) - owner: str = field(repr=F) - partflags: str | N = field(repr=F) - partlabel: str | N = field(repr=F) - partn: int | N = field(repr=F) - parttype: str | N = field(repr=F) - parttypename: str | N = field(repr=F) - partuuid: str | N = field(repr=F) - phySec: int = field(repr=F) - pkname: str | N = field(repr=F) - pttype: str | N = field(repr=F) - ptuuid: str | N = field(repr=F) - ra: int = field(repr=F) - rand: bool = field(repr=F) - rev: str | N = field(repr=F) - rm: bool = field(repr=F) - ro: bool = field(repr=F) - rota: bool = field(repr=F) - rqSize: int | N = field(repr=F) - sched: str | N = field(repr=F) - serial: str | N = field(repr=F) - size: str = field(repr=F) - start: int | N = field(repr=F) - state: str | N = field(repr=F) - subsystems: str = field(repr=F) - mountpoints: list = field(repr=F) - tran: str | N = field(repr=F) - uuid: str | N = field(repr=F) - vendor: str 
| N = field(repr=F) - wsame: str = field(repr=F) - wwn: str | N = field(repr=F) - zoned: str = field(repr=F) - zoneSz: str = field(repr=F) - zoneWgran: str = field(repr=F) - zoneApp: str = field(repr=F) - zoneNr: int = field(repr=F) - zoneOmax: int = field(repr=F) - zoneAmax: int = field(repr=F) + def lineage(_) -> Generator[Self, N, N]: + parent = _ + while parent: + yield parent + parent = parent.parent - # == cast data - translation: ClassVar[dict[str, str]] = { - 'alignment': 'alignment', - 'idLink': 'id-link', - 'idd': 'id', - 'discAln': 'disc-aln', - 'dax': 'dax', - 'discGran': 'disc-gran', - 'diskSeq': 'disk-seq', - 'discMax': 'disc-max', - 'discZero': 'disc-zero', - 'fsavail': 'fsavail', - 'fsroots': 'fsroots', - 'fssize': 'fssize', - 'fstype': 'fstype', - 'fsused': 'fsused', - 'fsusePerc': 'fsuse%', - 'fsver': 'fsver', - 'group': 'group', - 'hctl': 'hctl', - 'hotplug': 'hotplug', - 'kname': 'kname', - 'label': 'label', - 'logSec': 'log-sec', - 'majMin': 'maj:min', - 'maj': 'maj', - 'minn': 'min', - 'minIo': 'min-io', - 'mode': 'mode', - 'model': 'model', - 'mq': 'mq', - 'name': 'name', - 'optIo': 'opt-io', - 'owner': 'owner', - 'partflags': 'partflags', - 'partlabel': 'partlabel', - 'partn': 'partn', - 'parttype': 'parttype', - 'parttypename': 'parttypename', - 'partuuid': 'partuuid', - 'path': 'path', - 'phySec': 'phy-sec', - 'pkname': 'pkname', - 'pttype': 'pttype', - 'ptuuid': 'ptuuid', - 'ra': 'ra', - 'rand': 'rand', - 'rev': 'rev', - 'rm': 'rm', - 'ro': 'ro', - 'rota': 'rota', - 'rqSize': 'rq-size', - 'sched': 'sched', - 'serial': 'serial', - 'size': 'size', - 'start': 'start', - 'state': 'state', - 'subsystems': 'subsystems', - 'mountpoint': 'mountpoint', - 'mountpoints': 'mountpoints', - 'tran': 'tran', - 'typee': 'type', - 'uuid': 'uuid', - 'vendor': 'vendor', - 'wsame': 'wsame', - 'wwn': 'wwn', - 'zoned': 'zoned', - 'zoneSz': 'zone-sz', - 'zoneWgran': 'zone-wgran', - 'zoneApp': 'zone-app', - 'zoneNr': 'zone-nr', - 'zoneOmax': 'zone-omax', - 
'zoneAmax': 'zone-amax', - 'children': 'children', - } + alignment: int = field(repr=F, metadata={'translation': 'alignment'}) + children: list[Self] = field(repr=T, metadata={'translation': 'children'}) + dax: bool = field(repr=F, metadata={'translation': 'dax'}) + discAln: int = field(repr=F, metadata={'translation': 'disc-aln'}) + discGran: str = field(repr=F, metadata={'translation': 'disc-gran'}) + discMax: str = field(repr=F, metadata={'translation': 'disc-max'}) + discZero: bool = field(repr=F, metadata={'translation': 'disc-zero'}) + diskSeq: int = field(repr=F, metadata={'translation': 'disk-seq'}) + fsavail: str | N = field(repr=F, metadata={'translation': 'fsavail'}) + fsroots: list = field(repr=F, metadata={'translation': 'fsroots'}) + fssize: str | N = field(repr=F, metadata={'translation': 'fssize'}) + fstype: str | N = field(repr=T, metadata={'translation': 'fstype'}) + fsused: str | N = field(repr=F, metadata={'translation': 'fsused'}) + fsusePerc: str | N = field(repr=F, metadata={'translation': 'fsuse%'}) + fsver: str | N = field(repr=F, metadata={'translation': 'fsver'}) + group: str = field(repr=F, metadata={'translation': 'group'}) + hctl: str | N = field(repr=F, metadata={'translation': 'hctl'}) + hotplug: bool = field(repr=F, metadata={'translation': 'hotplug'}) + idd: str | N = field(repr=F, metadata={'translation': 'id'}) + idLink: str | N = field(repr=F, metadata={'translation': 'id-link'}) + kname: str = field(repr=F, metadata={'translation': 'kname'}) + label: str | N = field(repr=F, metadata={'translation': 'label'}) + logSec: int = field(repr=F, metadata={'translation': 'log-sec'}) + majMin: str = field(repr=F, metadata={'translation': 'maj'}) + maj: str = field(repr=F, metadata={'translation': 'maj:min'}) + minIo: int = field(repr=F, metadata={'translation': 'min-io'}) + minn: str = field(repr=F, metadata={'translation': 'min'}) + model: str | N = field(repr=F, metadata={'translation': 'model'}) + mode: str = field(repr=F, 
metadata={'translation': 'mode'}) + mountpoints: list = field(repr=F, metadata={'translation': 'mountpoints'}) + mountpoint: str | N = field(repr=T, metadata={'translation': 'mountpoint'}) + mq: str = field(repr=F, metadata={'translation': 'mq'}) + name: str = field(repr=F, metadata={'translation': 'name'}) + optIo: int = field(repr=F, metadata={'translation': 'opt-io'}) + owner: str = field(repr=F, metadata={'translation': 'owner'}) + partflags: str | N = field(repr=F, metadata={'translation': 'partflags'}) + partlabel: str | N = field(repr=F, metadata={'translation': 'partlabel'}) + partn: int | N = field(repr=F, metadata={'translation': 'partn'}) + parttypename: str | N = field(repr=F, metadata={'translation': 'parttypename'}) + parttype: str | N = field(repr=F, metadata={'translation': 'parttype'}) + partuuid: str | N = field(repr=F, metadata={'translation': 'partuuid'}) + path: Path = field(repr=T, metadata={'translation': 'path'}) + phySec: int = field(repr=F, metadata={'translation': 'phy-sec'}) + pkname: str | N = field(repr=F, metadata={'translation': 'pkname'}) + pttype: str | N = field(repr=F, metadata={'translation': 'pttype'}) + ptuuid: str | N = field(repr=F, metadata={'translation': 'ptuuid'}) + ra: int = field(repr=F, metadata={'translation': 'ra'}) + rand: bool = field(repr=F, metadata={'translation': 'rand'}) + rev: str | N = field(repr=F, metadata={'translation': 'rev'}) + rm: bool = field(repr=F, metadata={'translation': 'rm'}) + ro: bool = field(repr=F, metadata={'translation': 'ro'}) + rota: bool = field(repr=F, metadata={'translation': 'rota'}) + rqSize: int | N = field(repr=F, metadata={'translation': 'rq-size'}) + sched: str | N = field(repr=F, metadata={'translation': 'sched'}) + serial: str | N = field(repr=F, metadata={'translation': 'serial'}) + size: str = field(repr=F, metadata={'translation': 'size'}) + start: int | N = field(repr=F, metadata={'translation': 'start'}) + state: str | N = field(repr=F, metadata={'translation': 
'state'}) + subsystems: str = field(repr=F, metadata={'translation': 'subsystems'}) + tran: str | N = field(repr=F, metadata={'translation': 'tran'}) + typee: str = field(repr=T, metadata={'translation': 'type'}) + uuid: str | N = field(repr=F, metadata={'translation': 'uuid'}) + vendor: str | N = field(repr=F, metadata={'translation': 'vendor'}) + wsame: str = field(repr=F, metadata={'translation': 'wsame'}) + wwn: str | N = field(repr=F, metadata={'translation': 'wwn'}) + zoneAmax: int = field(repr=F, metadata={'translation': 'zone-amax'}) + zoneApp: str = field(repr=F, metadata={'translation': 'zone-app'}) + zoned: str = field(repr=F, metadata={'translation': 'zoned'}) + zoneNr: int = field(repr=F, metadata={'translation': 'zone-nr'}) + zoneOmax: int = field(repr=F, metadata={'translation': 'zone-omax'}) + zoneSz: str = field(repr=F, metadata={'translation': 'zone-sz'}) + zoneWgran: str = field(repr=F, metadata={'translation': 'zone-wgran'}) - diskTree: DiskTree | N + +Lsblk.Translation: ClassVar[dict[str, str]] = { + x.name: x.metadata['translation'] for x in dataclasses.fields(Lsblk) if not x.name.startswith('_') +} +Lsblk.TranslationBackwords: ClassVar[dict[str, str]] = {v: k for k, v in Lsblk.Translation.items()} @dataclass(slots=T, kw_only=T, frozen=F) class LsblkCollection: - devs: list[str] = field(default_factory=list, kw_only=F, repr=T) - # TODO make iterable - lsblks: list[Lsblk] = None - diskTree: dict[Path, DiskTree] = None + _data: list[Lsblk] - def __post_init__(_): - cmd = ['lsblk', '-J', '-O', *_.devs] - data = Run(cmd).run(T, T).wait().assSuc().json()['blockdevices'] - _.diskTree = DiskTree.Run() - _.lsblks = _._fromData(data) + @staticmethod + def Command(*dev): + if len(dev) == 1 and inst(dev, list | tuple): + dev = dev[0] + return ['lsblk', '--json', '--output-all', *dev] + + @classmethod + def FromRun(cls, *devs) -> Self: + cmd = devs + data = Run(cmd).run(T, T).wait().assSuc().json() + return cls.FromData(data=data) + + @classmethod + def 
FromData(cls, data: list[dict]) -> Self: + if 'blockdevices' in data: + data = data['blockdevices'] + assert inst(data, list | tuple) + return cls(_data=[Lsblk.FromData(data=x) for x in data]) + + _diskTree: DiskTree | type(...) = field(default=..., repr=F) # | property not set + + @property + def dt(_) -> DiskTree: + return _.diskTree + + @property + def diskTree(_) -> DiskTree: + # | use cache property incase lsblk is from data like using ssh + if _._diskTree is ...: + _._diskTree = _._diskTreeGet() + return _._diskTree + + def _diskTreeGet(_) -> DiskTree: + diskTree = DiskTree.Run() + _.populateDiskLinks(diskTree=diskTree) + return diskTree + + def populateDiskLinks(_, diskTree: DiskTree = N) -> Self: + if not diskTree: + diskTree = DiskTree.Run() + dtName = {k.name: v for k, v in diskTree.items()} + for lsblk in _.toList(): + if res := diskTree.get(lsblk.path): + lsblk._diskTree = res + elif res := dtName.get(lsblk.kname): # | i.e. "dm-2" + lsblk._diskTree = res + # else: + # print(lsblk.path) + return _ def toList(_, _items: list[Self] | Self = None) -> Generator[Any | Self, Any, None]: if _items is None: - _items = _.lsblks + _items = _._data if inst(_items, list): for i in _items: for j in _.toList(i): @@ -260,7 +268,12 @@ class LsblkCollection: else: yield _items - def filter(_, match: Callable, /, _items: list[Self] | Self = None) -> Generator[Any | Self, Any, None]: + def filter( + _, + match: Callable, + /, + _items: list[Self] | Self = None, + ) -> Generator[Any | Self, Any, None]: for i in _.toList(): if match(i): yield i @@ -280,25 +293,94 @@ class LsblkCollection: raise ValueError(f'Results length {l} > 1') return res[0] - def _fromData(_, data: list | dict) -> list[Lsblk]: - if inst(data, list): - data = [_._fromData(x) for x in data] - elif inst(data, dict): - data['path'] = Path(data['path']) - data['fsroots'] = [x for x in data['fsroots'] if x] - data['mountpoints'] = [x for x in data['mountpoints'] if x] - - data['children'] = 
_._fromData(data['children']) if data.get('children') else [] - data = Lsblk( - **{k: data[v] for k, v in Lsblk.translation.items()}, - diskTree=_.diskTree.get(data['path'], DiskTree(root=None)), - ) - return data + def getAssOne(_) -> Lsblk: + assert len(_._data) == 1 + return _._data[0] def __iter__(_) -> defl.Generator[Lsblk, N, N]: - for i in _.lsblks: + for i in _._data: yield i + def __getitem__(_, k): + if not inst(k, tuple | list): + k = [k] + # if 'children' not in k: + # k.append('children') + return _.descendDict(lambda x: x[k]) + + def descend( + _, + process: Callable | dict[Any, Callable], + _lsblk: Any = N, + _i: int = 0, + ) -> Self: + # TODO depth first breadth first + # == iterate + if _lsblk is N: + desc = _.descend(process=process, _lsblk=_._data, _i=_i + 1) + desc = [x for x in desc if x] + return _.__class__(_data=desc) + + if inst(_lsblk, list): + res = [_.descend(process=process, _lsblk=x, _i=_i + 1) for x in _lsblk] + res = [x for x in res if x and x._keep] + return res + + _lsblk.children = _.descend(process=process, _lsblk=_lsblk.children, _i=_i + 1) + + # == evaluate + res = _._applyDictCallable(process=process, value=_lsblk) + + # == result + if callable(process): + res = {'result': res} + _lsblk._meta.update(res) + # print(_lsblk.name, res, _lsblk._meta) + # print(' ' * _i, process.keys() if inst(process,dict) else process, _lsblk.name, 'keep', _lsblk._keep, 'result', _lsblk._meta) + if _lsblk._keep: + return _lsblk + + def descendDict( + _, + process: Callable | dict[Any, Callable], + dropNone: bool = F, + _lsblk: Any = N, + _tmpKey: Any = N, + ) -> Self: + if _lsblk is N: + _tmpKey = defl.randomString() + _.descend(process={_tmpKey: partial(_._applyDictCallable, process=process)}) + desc = _.descendDict(process=N, _lsblk=_._data, _tmpKey=_tmpKey, dropNone=dropNone) + desc = [x for x in desc if x] + return desc + elif inst(_lsblk, list): + desc = [_.descendDict(process=N, _lsblk=x, _tmpKey=_tmpKey, dropNone=dropNone) for x in _lsblk] 
+ desc = [x for x in desc if x] + return desc + + res = _lsblk._meta[_tmpKey] + del _lsblk._meta[_tmpKey] + + children = [_.descendDict(process=N, _lsblk=x, _tmpKey=_tmpKey, dropNone=dropNone) for x in _lsblk.children] + if dropNone: + children = [x for x in children if x] + + if _lsblk._keep: + if dropNone and not res: + return + return {'result': res} | {'children': children} + + @staticmethod + def _applyDictCallable(value: Any, process: Callable | dict[Any, Callable]): + if callable(process): + return process(value) + else: + assert 'children' not in process + return {k: v(value) for k, v in process.items()} + + def toJson(_, keys: list = N): + return _.descendDict(process=lambda x: x[['name', 'type', 'mountpoints']]) + @dataclass(slots=T, kw_only=T, frozen=F) class LoopDev: @@ -365,7 +447,7 @@ class LoopDev: return dev -@dantDC() +@dataclass(slots=T, kw_only=T, frozen=F) class _MountPoint: device: Dath mount: Dath @@ -374,9 +456,9 @@ class _MountPoint: # TODO use /proc/mount -@dantDC() +@dataclass(slots=T, kw_only=T, frozen=F) class MountPoints: - _mounts: list[_MountPoint] = DantField([], kw_only=T, repr=T) + _mounts: list[_MountPoint] = field(default_factory=list, kw_only=T, repr=T) def __post_init__(_) -> N: mntPath = Dath('/proc/mounts') @@ -397,6 +479,7 @@ class MountPoints: def f(x: _MountPoint): if dev and x.device != dev: return + # print(type(x.mount._path), type(mnt)) if mnt and x.mount != mnt: return if fs and x.fs != fs: diff --git a/defl/mpv2_.py b/defl/mpv2_.py index 4bf8344..7a8c7c1 100644 --- a/defl/mpv2_.py +++ b/defl/mpv2_.py @@ -1,486 +1,553 @@ -#!/usr/bin/env python - -# TODO subtitiles -""" -# https://github.com/jaseg/python-mpv/blob/main/mpv.py -""" - -from dataclasses import dataclass, field -import re, threading -import os -import enum -import json -import atexit -import socket -from time import sleep -from defl import ( - cl, - log, - Dath, - Run, -) -from defl._typing_ import * -from contextlib import contextmanager -from 
defl._typing_ import T, F, N, U -from defl._rich_ import * -from defl.mpv2_ import * -import defl - - -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -# ◼◼ Mpv Log Classes -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - -@dataclass(slots=T, kw_only=T, frozen=F) -class MpvEvent: - event: str - reason: str | UT = U - file_error: str | UT = U - playlist_entry_id: int | UT = U - - -@dataclass(slots=T, kw_only=T, frozen=F) -class MpvRequest: - data: Any | UT = U - error: str - - def success(_) -> bool: - return _.error == 'success' - - def assSuc(_): - assert _.success(), _ - - -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -# ◼◼ exceptions -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - -class MpvError(Exception): - __slot__ = () - - -class MpvSocketExistsError(MpvError): - __slot__ = () - - -class MpvRunningError(MpvError): - __slot__ = () - - -class MpvNoSocketFileError(MpvError): - __slot__ = () - - -class MpvNotRunningError(MpvError): - __slot__ = () - - -class MpvFailedToStartVideo(MpvError): - __slot__ = () - - -class MpvEventsChangedOnIter(MpvError, IndexError): - __slot__ = () - - -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -# ◼◼ Enum -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - -class MpvFailReason(defl.Enumer): - AgeRestricted = enum.auto() - Forbidden403 = enum.auto() - - -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ -# ◼◼ MPV -# ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - -@dataclass(slots=T, kw_only=T, frozen=F) -class Mpv: - _tmpPath: Dath = N - _run: Run | UT = U - _socket: socket.socket | UT = U - _events: list[MpvEvent] = field(default_factory=list, kw_only=T, repr=F) - _request: dict[int, MpvRequest] = field(default_factory=dict, kw_only=T, repr=F) - _running: bool = F - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ printing - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - def richAutoReprSuper(_): - yield 'running', _._running - - 
__rich_repr__ = defl.Obj.richAutoRepr - __str__ = defl.Obj.toRichRepr - __repr__ = defl.Obj.toRichRepr - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ properies - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - @property - def running(_) -> bool: - return _._running - - @property - def sockPath(_) -> Dath: - return _._tmpPath.reExt('sock') - - @property - def logPath(_) -> Dath: - return _._tmpPath.reExt('log') - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ init - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - def __post_init__(_) -> N: - _._tmpPath = Dath.Temp().temp(create=F) - # _.logPath.rmOnExit() - - def run(_) -> Run: - if _.sockPath.exists(): - raise MpvSocketExistsError() - if _._run is not U and _._run.rc is N: - raise MpvRunningError() - if _._running: - raise MpvRunningError() - _._running = T - _._run = Run(_.cmd).run(N, N) - _._socket = _.connectToSocket() - atexit.register(_.exit) - return _._run - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ CLI - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - _ydlQuality: str = 'worst[height>=320]/worst/worstaudio+worstvideo' - - def ydlQualityGS(_, val: str = U) -> str: - if val is not U: - _._ydlQuality = val - return _._ydlQuality - - _extraMpvOpt: list[str] = field(default_factory=list, kw_only=T, repr=T) - - def extraMpvOptGS(_, *args: str, **kargs) -> list[str]: - _._extraMpvOpt.extend(args) - assert inst(_._extraMpvOpt, list) - return _._extraMpvOpt - - @property - def cmd(_) -> list[str]: - empty = '/tmp/empty.mp4' # | have to have a file to start - s = 'anullsrc=r=44100:cl=mono' - cmd = ['ffmpeg', '-y', '-t', 0.1, '-s'] - cmd += ['640x480', '-f', 'rawvideo', '-pix_fmt', 'rgb24', '-r', '25', '-i', '/dev/zero', empty] - # cmd = ['ffmpeg', '-y', '-f', 'lavfi', '-i', s, '-t', '0.1', '-q:a', '9', '-acodec', 'libmp3lame', empty] - Run(cmd).run(T, T).assSuc() - - home = os.environ['HOME'] - ytOpt = [ - 
f'--script-opts=ytdl_hook-ytdl_path={home}/.local/bin/yt-dlp', - f'--ytdl-format={_.ydlQualityGS()}', - '--ytdl-raw-options=no-playlist=', - ] - mpvOpt = [ - # '--track-auto-selection=no', - '--fullscreen=yes', - f'--input-ipc-server={_.sockPath}', - '--keep-open=always', - '--msg-level=all=error', - f'--log-file={_.logPath}', - '--idle=yes', - ] - return ['mpv', *mpvOpt, *ytOpt, *_.extraMpvOptGS(), '--', empty] - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ socket communication - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - def connectToSocket(_) -> socket.socket: - i = 0 - while not _.sockPath.exists(): - i += 1 - sleep(0.2) - log.info(f'wait for socket ({i})', _.sockPath, end=cl.startOfLine) - _.verifyMpvRunning() - - retry = T - while retry: - with defl.IgnoreExceptionCM(ConnectionError): - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.connect(_.sockPath.s) - sock.settimeout(0.2) - retry = F - - return sock - - _sendCmdId: int = 0 - - def sendCmd(_, *cmd) -> MpvRequest: - if len(cmd) == 1 and inst(cmd[0], list): - cmd = cmd[0] - - if not _.sockPath.exists(): - raise MpvNoSocketFileError() - - idd = _._sendCmdId - log.debug(f'send cmd {idd}', lambda x: x.cmd) - _._sendCmdId = (_._sendCmdId + 1) % 99999 - data = {'command': cmd, 'request_id': idd} - cmd = json.dumps(data).encode('utf8') + b'\n' - with _.useSocketCM(): - _._socket.send(cmd) - - while idd not in _._request: - # TODO optimize - _._addLinesToCache() - sleep(0.05) - res = _._request.pop(idd) - log.debug(f' -> cmd result {idd}', lambda x: x.res) - - return res - - @contextmanager - def useSocketCM(_): - try: - if _._socket is U: - _.verifyMpvRunning() - yield - except BrokenPipeError as e: - _.closeSocket() - _.exit() - _.verifyMpvRunning() - raise e - except TimeoutError: - ... 
# | ignore - - def closeSocket(_) -> Self: - if _._socket is not U: - _._socket.close() - _._socket = U - return _ - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ controls - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - def verifyMpvRunning(_) -> Self: - if _._run.rc is not N: - raise MpvNotRunningError(f'{_._run.rc} {_.logPath}') - return _ - - def exit(_) -> Self: - if not _._running: - return _ - atexit.unregister(_.exit) - log.info('quit mpv', '') - if _._socket is not U: - try: - _.quitMpv() - except MpvNotRunningError: - log.info('Already exited', '') - _.closeSocket() - _.sockPath.remove() - _._run.kill('int') - assert _._run.rc is not N - _._running = F - return _ - - def steamStartVideo(_, source: str) -> Generator[MpvEvent | N, N, N]: - oldDur = _.duration() - playListId = _.loadSource(source) - - events = _.eventIter() - - i = 0 - while (duration := _.duration()) is N or duration == oldDur: - if i % 10 == 0: - _.verifyMpvRunning() - if _.idleActive(): - # TODO video display dissapears so next video fails. 
- raise MpvFailedToStartVideo(_.logPath) - i += 1 - log.debug('waiting for video to load') - sleep(0.2) - while (event := next(events)) is not N: - log.info('event', lambda x: x.event) - if event.event == 'end-file': - if event.playlist_entry_id == playListId: - raise MpvFailedToStartVideo(event) - - while (t := _.curTime()) is N or t < 0.1: - log.debug('waiting for video to start') - sleep(0.1) - - workIndex = 0 - while (ptr := _.playTimeRem()) is N or ptr > 0.1 or not _.eofReached(): - workIndex = (workIndex + 1) % 60 - if workIndex % 10 == 0: - _.verifyMpvRunning() # | overkill but doesnt hurt `useSocketCM` will throw if dead - if ptr is N: - log.info('waiting for video to load', '') - yield from _.popEvents() - yield N # | consumer can sleep - yield from _.popEvents() - log.debug('video ended') - - def startVideo(_, source: str) -> Generator[MpvEvent | N, N, N]: - playListId = _.loadSource(source) - workIndex = 0 - fileLoaded = F - while pc := _.sendCmd('get_property', 'playlist-count').data: - assert pc == 1 - - workIndex = (workIndex + 1) % 60 - if workIndex % 10 == 0: - _.verifyMpvRunning() # | overkill but doesnt hurt `useSocketCM` will throw if dead - - for event in _.popEvents(): - if fileLoaded: - if event.event == 'end-file': - # TODO not working - _.sendCmd('stop') - yield event - elif event.event == 'file-loaded': - fileLoaded = T - - if fileLoaded: - yield N # | consumer can sleep - else: - sleep(0.1) - - yield from _.popEvents() - - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - # ◼◼ logs / events - # ◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼◼ - - def popEvents(_) -> Generator[dict, N, N]: - _._addLinesToCache() - while _._events: - yield _._events.pop(0) - - def getLogs(_) -> Generator[str, N, N]: - with _.logPath.open('r') as fp: - while l := fp.readline(): - yield l.strip() - - _cacheLock: defl.ThreadLock = field(default_factory=defl.ThreadLock) - - def _addLinesToCache(_) -> Self: - with _._cacheLock: - # 
print('_addLinesToCache') - # print('_addLinesToCache', 'lock') - recv = bytearray() - - with _.useSocketCM(): - while r := _._socket.recv(1024): # | can cause process to receive 'Broken pipe' signal - recv += r - recv = recv.split(b'\n') - for line in recv: - if not line: - continue - line = json.loads(line) - match line: - case {'event': __}: - _._events.append(MpvEvent(**line)) - case {'request_id': __}: - rid = line.pop('request_id') - _._request[rid] = MpvRequest(**line) - case __: - raise NotImplementedError('') - - # print('_addLinesToCache', 'unlock') - return _ - - def analyseFailLogs(_) -> MpvFailReason: - for line in _.logLines(): - match line: - case str(): - ... - case dict(): - if line['module'] == '[ytdl_hook]' and 'Sign in to confirm your age.' in line['log']: - return MpvFailReason.AgeRestricted - if line['module'] == '[ffmpeg]' and 'https: HTTP error 403 Forbidden' in line['log']: - return MpvFailReason.Forbidden403 - case __: - raise NotImplementedError(f'no match for {line}') - - def logLines(_) -> Generator[dict, N, N]: - lineReg = re.compile(r'^(?P