Skip to content

Commit

Permalink
ci: bring configs up to date, switch to pyproject.toml
Browse files Browse the repository at this point in the history
  • Loading branch information
karlicoss committed Nov 6, 2023
1 parent f161f9f commit 939aee6
Show file tree
Hide file tree
Showing 11 changed files with 193 additions and 112 deletions.
2 changes: 1 addition & 1 deletion .ci/run
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,4 @@ if ! command -v python3 &> /dev/null; then
fi

"$PY_BIN" -m pip install --user tox
"$PY_BIN" -m tox
"$PY_BIN" -m tox --parallel --parallel-live "$@"
20 changes: 14 additions & 6 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,22 @@ on:
pull_request: # needed to trigger on others' PRs
# Note that people who fork it need to go to "Actions" tab on their fork and click "I understand my workflows, go ahead and enable them".
workflow_dispatch: # needed to trigger workflows manually
# todo cron?
# todo cron?
inputs:
debug_enabled:
type: boolean
description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
required: false
default: false


jobs:
build:
strategy:
fail-fast: false
matrix:
platform: [ubuntu-latest, macos-latest, windows-latest]
python-version: ['3.7', '3.8', '3.9', '3.10']
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
# vvv just an example of excluding stuff from matrix
# exclude: [{platform: macos-latest, python-version: '3.6'}]

Expand All @@ -28,22 +35,23 @@ jobs:
# ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
- run: echo "$HOME/.local/bin" >> $GITHUB_PATH

- uses: actions/setup-python@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}

- uses: actions/checkout@v3
with:
submodules: recursive
fetch-depth: 0 # nicer to have all git history when debugging/for tests

# uncomment for SSH debugging
# - uses: mxschmitt/action-tmate@v3
- uses: mxschmitt/action-tmate@v3
if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}

# explicit bash command is necessary for Windows CI runner, otherwise it thinks it's cmd...
- run: bash .ci/run

- if: matrix.platform == 'ubuntu-latest' # no need to compute coverage for other platforms
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: .coverage.mypy_${{ matrix.platform }}_${{ matrix.python-version }}
path: .coverage.mypy/
Expand Down
38 changes: 38 additions & 0 deletions conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Hack: monkey patch pytest so it handles tests inside namespace packages (no __init__.py) properly.
# Without it, pytest can't discover the package root for some reason.
# Also see https://github.com/karlicoss/pytest_namespace_pkgs for more.

import pathlib
from typing import Optional

import _pytest.main
import _pytest.pathlib

# we consider all dirs in repo/ to be namespace packages
# NOTE: resolved relative to this conftest.py, so the repo layout must keep src/ next to it
root_dir = pathlib.Path(__file__).absolute().parent.resolve() / 'src'
assert root_dir.exists(), root_dir

# TODO assert it contains package name?? maybe get it via setuptools..

# every immediate subdirectory of src/ is treated as a namespace package root
namespace_pkg_dirs = [str(d) for d in root_dir.iterdir() if d.is_dir()]

# resolve_package_path is called from _pytest.pathlib.import_path.
# It takes a full abs path to the test file and needs to return the path
# to the 'root' package on the filesystem.
resolve_pkg_path_orig = _pytest.pathlib.resolve_package_path  # kept around for reference/debugging
def resolve_package_path(path: pathlib.Path) -> Optional[pathlib.Path]:
    """Walk upwards from the test file until we hit one of the namespace package roots."""
    result = path  # search from the test file upwards
    for parent in result.parents:
        if str(parent) in namespace_pkg_dirs:
            return parent
    # fix: original passed two args to RuntimeError ("...for ", path), which renders
    # as a tuple-style message with a stray trailing space; format it properly instead
    raise RuntimeError(f"Couldn't determine path for {path}")
_pytest.pathlib.resolve_package_path = resolve_package_path


# Without patching, the original function returns just a package name for some reason
# (I think it's used as a sort of fallback),
# so we need to point it at the absolute path properly.
# Not sure what the consequences are.. maybe it wouldn't be able to run against
# installed packages? not sure..
search_pypath_orig = _pytest.main.search_pypath  # kept around for reference/debugging
def search_pypath(module_name: str) -> str:
    # always resolve to the src/ root, regardless of the requested module name
    return str(root_dir)
_pytest.main.search_pypath = search_pypath
7 changes: 6 additions & 1 deletion mypy.ini
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@
[mypy]
namespace_packages = True
pretty = True
show_error_context = True
show_error_codes = True
show_column_numbers = True
show_error_end = True
warn_unused_ignores = True
check_untyped_defs = True
namespace_packages = True
enable_error_code = possibly-undefined
strict_equality = True

# an example of suppressing
# [mypy-my.config.repos.pdfannots.pdfannots]
Expand Down
43 changes: 43 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# see https://github.com/karlicoss/pymplate for up-to-date reference
[project]
dynamic = ["version"] # version is managed by setuptools_scm
name = "pinbexport"
dependencies = [
]
# TODO maybe split out DAL deps and export deps? might be nice

## these need to be set if you're planning to upload to pypi
# description = "TODO"
# license = {file = "LICENSE"}
# authors = [
# {name = "Dima Gerasimov (@karlicoss)", email = "karlicoss@gmail.com"},
# ]
# maintainers = [
# {name = "Dima Gerasimov (@karlicoss)", email = "karlicoss@gmail.com"},
# ]
#
# [project.urls]
# Homepage = "https://github.com/karlicoss/pymplate"
##

[project.optional-dependencies]
optional = [
"orjson",
"colorlog",
"ijson", # faster iterative json processing
]
testing = [
"pytest",
"ruff",
"mypy",
"lxml", # for mypy html coverage
]


[build-system]
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"

[tool.setuptools_scm]
version_scheme = "python-simplified-semver"
local_scheme = "dirty-tag"
29 changes: 29 additions & 0 deletions ruff.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
ignore = [
### too opinionated style checks
"E501", # too long lines
"E702", # Multiple statements on one line (semicolon)
"E731", # assigning lambda instead of using def
"E741", # Ambiguous variable name: `l`
"E742", # Ambiguous class name: `O
"E401", # Multiple imports on one line
"F403", # import *` used; unable to detect undefined names
###

###
"E722", # Do not use bare `except` ## Sometimes it's useful for defensive imports and that sort of thing..
"F811", # Redefinition of unused # this gets in the way of pytest fixtures (e.g. in cachew)

## might be nice .. but later and I don't wanna make it strict
"E402", # Module level import not at top of file

### maybe consider these soon
# sometimes it's useful to give a variable a name even if we don't use it as a documentation
# on the other hand, often is a sign of error
"F841", # Local variable `count` is assigned to but never used
"F401", # imported but unused
###
]

exclude = [
"src/hypexport/Hypothesis",
]
59 changes: 0 additions & 59 deletions setup.py

This file was deleted.

36 changes: 15 additions & 21 deletions src/pinbexport/dal.py
Original file line number Diff line number Diff line change
@@ -1,33 +1,30 @@
#!/usr/bin/env python3
from typing import NamedTuple, Optional, Sequence, Iterator, Set, Iterable
from pathlib import Path
import json
from datetime import datetime
import logging

import pytz
import json
from pathlib import Path
from typing import NamedTuple, Sequence, Iterator, Set, Iterable, NewType

from .exporthelpers.dal_helper import PathIsh, Json
from .exporthelpers import dal_helper
from .exporthelpers.dal_helper import PathIsh, Json, datetime_aware
from .exporthelpers.logging_helper import make_logger


from typing import NewType
Url = NewType('Url', str)

Tag = str


# todo reuse logger from helper
def get_logger() -> logging.Logger:
return logging.getLogger('pinbexport')
logger = make_logger(__name__)


class Bookmark(NamedTuple):
raw: Json

@property
def created(self) -> datetime:
def created(self) -> datetime_aware:
dts = self.raw['time']
return pytz.utc.localize(datetime.strptime(dts, '%Y-%m-%dT%H:%M:%SZ'))
# contains Z at the end, so will end up as UTC
return datetime.fromisoformat(dts)

@property
def url(self) -> Url:
Expand All @@ -36,8 +33,8 @@ def url(self) -> Url:
@property
def title(self) -> str:
titles = self.raw['description']
if titles == False:
titles = '' # *shrug* happened onc
if titles is False:
titles = '' # *shrug* happened a few times
return titles

@property
Expand All @@ -49,7 +46,6 @@ def tags(self) -> Sequence[Tag]:
return tuple(self.raw['tags'].split())



class DAL:
def __init__(self, sources: Sequence[PathIsh]) -> None:
self.sources = [p if isinstance(p, Path) else Path(p) for p in sources]
Expand All @@ -62,25 +58,24 @@ def raw(self) -> Json:
except Exception as e:
raise RuntimeError(f'While processing {last}') from e


def _bookmarks_raw(self) -> Iterable[Json]:
data = self.raw()
if isinstance(data, list):
return data # old format
return data # old format
else:
return data['posts']

def bookmarks(self) -> Iterator[Bookmark]:
def key(b: Bookmark):
return (b.created, b.url)
logger = get_logger()

emitted: Set = set()
for j in self._bookmarks_raw():
bm = Bookmark(j)
# TODO could also detect that by hash?
bk = key(bm)
if bk in emitted:
logger.debug('skipping duplicate item %s', bm)
logger.debug(f'skipping duplicate item {bm}')
continue
emitted.add(bk)
yield bm
Expand All @@ -92,5 +87,4 @@ def demo(dal: DAL) -> None:


if __name__ == '__main__':
from .exporthelpers import dal_helper
dal_helper.main(DAL=DAL, demo=demo)
22 changes: 12 additions & 10 deletions src/pinbexport/export.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from urllib.parse import urlencode
from urllib.request import urlopen

from .exporthelpers.export_helper import Json
from .exporthelpers.export_helper import Json, setup_parser, Parser


class Exporter:
Expand All @@ -14,18 +14,20 @@ def __init__(self, *args, **kwargs) -> None:
self.api_base = 'https://api.pinboard.in/v1/'

def _get(self, endpoint: str) -> Json:
query = urlencode([
('format' , 'json'),
('auth_token', self.token),
])
query = urlencode(
[
('format', 'json'),
('auth_token', self.token),
]
)
url = self.api_base + endpoint + '?' + query
return json.loads(urlopen(url).read())

def export_json(self) -> Json:
return dict(
tags = self._get('tags/get'),
posts= self._get('posts/all'), # TODO
notes= self._get('notes/list'),
tags=self._get('tags/get'),
posts=self._get('posts/all'),
notes=self._get('notes/list'),
)


Expand All @@ -46,14 +48,14 @@ def main() -> None:


def make_parser() -> argparse.ArgumentParser:
from .exporthelpers.export_helper import setup_parser, Parser
parser = Parser('Export your bookmarks from [[https://pinboard.in][Pinboard]]')
setup_parser(
parser=parser,
params=['token'],
extra_usage='''
You can also import ~export.py~ this as a module and call ~get_json~ function directly to get raw JSON.
''')
''',
)
return parser


Expand Down
Loading

0 comments on commit 939aee6

Please sign in to comment.