
Commit

build: updated deps, switch to ruff for format+pyupgrade, fixed lints and type issues, removed takethetime dependency
ErikBjare committed Sep 29, 2024
1 parent 3b83ab5 commit 10ecd1f
Showing 19 changed files with 434 additions and 627 deletions.
7 changes: 0 additions & 7 deletions .github/workflows/lint.yml
@@ -13,10 +13,3 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- uses: jpetrucciani/ruff-check@main

format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- uses: psf/black@stable
6 changes: 1 addition & 5 deletions Makefile
@@ -21,18 +21,14 @@ typecheck:
typecheck-strict:
export MYPYPATH=./stubs; python -m mypy aw_core aw_datastore aw_transform aw_query --strict-optional --check-untyped-defs; echo "Not a failing step"

PYFILES=$(shell find . -type f -name '*.py')
PYIFILES=$(shell find . -type f -name '*.pyi')

lint:
ruff check .

lint-fix:
pyupgrade --py37-plus ${PYFILES} && true
ruff check --fix .

format:
black ${PYFILES} ${PYIFILES}
ruff format .

clean:
rm -rf build dist
6 changes: 2 additions & 4 deletions aw_core/__about__.py
@@ -1,11 +1,11 @@
# Inspired by:
# https://github.com/pypa/pipfile/blob/master/pipfile/__about__.py


__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
@@ -16,10 +16,8 @@
__summary__ = "Core library for ActivityWatch"
__uri__ = "https://github.com/ActivityWatch/aw-core"

__version__ = "0.4.2"

__author__ = "Erik Bjäreholt, Johan Bjäreholt"
__email__ = "[email protected], [email protected]"

__license__ = "MPL2"
__copyright__ = "Copyright 2017 %s" % __author__
__copyright__ = f"Copyright 2017 {__author__}"
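As a quick aside (not part of the commit), the %-to-f-string change in __copyright__ is purely cosmetic; both forms produce the same string:

    # Standalone sketch: old %-formatting and the new f-string yield identical output.
    author = "Erik Bjäreholt, Johan Bjäreholt"
    old_style = "Copyright 2017 %s" % author
    new_style = f"Copyright 2017 {author}"
    assert old_style == new_style == "Copyright 2017 Erik Bjäreholt, Johan Bjäreholt"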
3 changes: 0 additions & 3 deletions aw_core/config.py
@@ -6,7 +6,6 @@
from deprecation import deprecated

from aw_core import dirs
from aw_core.__about__ import __version__

logger = logging.getLogger(__name__)

@@ -81,7 +80,6 @@ def save_config_toml(appname: str, config: str) -> None:
@deprecated(
details="Use the load_config_toml function instead",
deprecated_in="0.5.3",
current_version=__version__,
)
def load_config(appname, default_config):
"""
@@ -107,7 +105,6 @@ def load_config(appname, default_config):
@deprecated(
details="Use the save_config_toml function instead",
deprecated_in="0.5.3",
current_version=__version__,
)
def save_config(appname, config):
config_dir = dirs.get_config_dir(appname)
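For context, the deprecation notes above point callers at the TOML-based helpers. save_config_toml's signature is visible in the hunk; the exact signature and return type of load_config_toml are assumptions in this sketch:

    # Hypothetical usage of the non-deprecated TOML helpers (names taken from the deprecation notes above).
    from aw_core.config import load_config_toml, save_config_toml

    default_config = """
    [server]
    host = "localhost"
    port = 5600
    """

    config = load_config_toml("aw-example", default_config)  # assumed: parses the TOML and returns a dict
    save_config_toml("aw-example", default_config)            # signature shown in the hunk above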
4 changes: 2 additions & 2 deletions aw_core/decorators.py
@@ -22,8 +22,8 @@ def g(*args, **kwargs):
if not warned_for:
warnings.simplefilter("always", DeprecationWarning) # turn off filter
warnings.warn(
"Call to deprecated function {}, "
"this warning will only show once per function.".format(f.__name__),
f"Call to deprecated function {f.__name__}, "
"this warning will only show once per function.",
category=DeprecationWarning,
stacklevel=2,
)
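The rewritten warning keeps its two-line layout by relying on Python's implicit concatenation of adjacent string literals, so switching the first literal to an f-string does not change the message. A minimal standalone check:

    # Adjacent string literals, f-string or not, are joined into one string at compile time.
    name = "old_func"
    message = (
        f"Call to deprecated function {name}, "
        "this warning will only show once per function."
    )
    assert message == (
        "Call to deprecated function old_func, "
        "this warning will only show once per function."
    )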
12 changes: 4 additions & 8 deletions aw_core/models.py
@@ -50,7 +50,7 @@ def __init__(
id: Optional[Id] = None,
timestamp: Optional[ConvertibleTimestamp] = None,
duration: Duration = 0,
data: Data = dict(),
data: Data | None = None,
) -> None:
self.id = id
if timestamp is None:
@@ -65,7 +65,7 @@ def __init__(
# (lacks support for properties)
self.timestamp = _timestamp_parse(timestamp)
self.duration = duration # type: ignore
self.data = data
self.data = data or {}

def __eq__(self, other: object) -> bool:
if isinstance(other, Event):
@@ -76,19 +76,15 @@ def __eq__(self, other: object) -> bool:
)
else:
raise TypeError(
"operator not supported between instances of '{}' and '{}'".format(
type(self), type(other)
)
f"operator not supported between instances of '{type(self)}' and '{type(other)}'"
)

def __lt__(self, other: object) -> bool:
if isinstance(other, Event):
return self.timestamp < other.timestamp
else:
raise TypeError(
"operator not supported between instances of '{}' and '{}'".format(
type(self), type(other)
)
f"operator not supported between instances of '{type(self)}' and '{type(other)}'"
)

def to_json_dict(self) -> dict:
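The `data: Data = dict()` default replaced above is the classic mutable-default pitfall: the dict is created once, when __init__ is defined, and shared by every Event built without explicit data. A standalone sketch of the problem and the None-sentinel fix used in the hunk:

    # Pitfall: a dict default is created once and shared by all calls.
    def add_tag_bad(tag, data=dict()):
        data[tag] = True
        return data

    assert add_tag_bad("a") == {"a": True}
    assert add_tag_bad("b") == {"a": True, "b": True}  # leaked state from the first call

    # Fix (as in Event.__init__ above): use None as sentinel, build the dict per call.
    def add_tag_good(tag, data=None):
        data = data or {}
        data[tag] = True
        return data

    assert add_tag_good("a") == {"a": True}
    assert add_tag_good("b") == {"b": True}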
108 changes: 55 additions & 53 deletions aw_datastore/benchmark.py
@@ -1,14 +1,12 @@
#!/usr/bin/env python3
import sys
from typing import Callable
from datetime import datetime, timedelta, timezone
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
from typing import Callable

from aw_core.models import Event

from takethetime import ttt

from aw_datastore import get_storage_methods, Datastore
from aw_datastore import get_storage_methods
from aw_datastore.storages import AbstractStorage

td1s = timedelta(seconds=1)
@@ -39,54 +37,58 @@ def temporary_bucket(ds):


def benchmark(storage: Callable[..., AbstractStorage]):
if storage.__name__ == "PeeweeStorage":
ds = Datastore(storage, testing=True, filepath="test.db")
else:
ds = Datastore(storage, testing=True)

num_single_events = 50
num_replace_events = 50
num_bulk_events = 20_000
num_events = num_single_events + num_replace_events + num_bulk_events + 1
num_final_events = num_single_events + num_bulk_events + 1

events = create_test_events(num_events)
single_events = events[:num_single_events]
replace_events = events[num_single_events : num_single_events + num_replace_events]
bulk_events = events[num_single_events + num_replace_events : -1]

print(storage.__name__)

with temporary_bucket(ds) as bucket:
with ttt(" sum"):
with ttt(f" single insert {num_single_events} events"):
for event in single_events:
bucket.insert(event)

with ttt(f" bulk insert {num_bulk_events} events"):
bucket.insert(bulk_events)

with ttt(f" replace last {num_replace_events}"):
for e in replace_events:
bucket.replace_last(e)

with ttt(" insert 1 event"):
bucket.insert(events[-1])

with ttt(" get one"):
events_tmp = bucket.get(limit=1)

with ttt(" get all"):
events_tmp = bucket.get(limit=-1)
assert len(events_tmp) == num_final_events

with ttt(" get range"):
events_tmp = bucket.get(
limit=-1,
starttime=events[1].timestamp + 0.01 * td1s,
endtime=events[-1].timestamp + events[-1].duration,
)
assert len(events_tmp) == num_final_events - 1
raise NotImplementedError(
"No longer implemented as ttt/takethetime dependency is removed"
)

# if storage.__name__ == "PeeweeStorage":
# ds = Datastore(storage, testing=True, filepath="test.db")
# else:
# ds = Datastore(storage, testing=True)

# num_single_events = 50
# num_replace_events = 50
# num_bulk_events = 20_000
# num_events = num_single_events + num_replace_events + num_bulk_events + 1
# num_final_events = num_single_events + num_bulk_events + 1

# events = create_test_events(num_events)
# single_events = events[:num_single_events]
# replace_events = events[num_single_events : num_single_events + num_replace_events]
# bulk_events = events[num_single_events + num_replace_events : -1]

# print(storage.__name__)

# with temporary_bucket(ds) as bucket:
# with ttt(" sum"):
# with ttt(f" single insert {num_single_events} events"):
# for event in single_events:
# bucket.insert(event)

# with ttt(f" bulk insert {num_bulk_events} events"):
# bucket.insert(bulk_events)

# with ttt(f" replace last {num_replace_events}"):
# for e in replace_events:
# bucket.replace_last(e)

# with ttt(" insert 1 event"):
# bucket.insert(events[-1])

# with ttt(" get one"):
# events_tmp = bucket.get(limit=1)

# with ttt(" get all"):
# events_tmp = bucket.get(limit=-1)
# assert len(events_tmp) == num_final_events

# with ttt(" get range"):
# events_tmp = bucket.get(
# limit=-1,
# starttime=events[1].timestamp + 0.01 * td1s,
# endtime=events[-1].timestamp + events[-1].duration,
# )
# assert len(events_tmp) == num_final_events - 1


if __name__ == "__main__":
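The benchmark body is commented out because ttt from the removed takethetime dependency is gone. If it is ever revived, a small context manager over time.perf_counter could stand in; this is only a sketch, under the assumption that ttt simply printed elapsed wall-clock time per labelled block:

    # Minimal stand-in for takethetime's ttt(), assuming "print elapsed time per block" semantics.
    import time
    from contextlib import contextmanager

    @contextmanager
    def ttt(label: str = ""):
        start = time.perf_counter()
        try:
            yield
        finally:
            elapsed = time.perf_counter() - start
            print(f"{label}: {elapsed:.3f}s")

    # Usage mirrors the commented-out benchmark code:
    with ttt(" insert 1 event"):
        sum(range(1_000_000))  # placeholder for bucket.insert(events[-1])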
23 changes: 8 additions & 15 deletions aw_datastore/datastore.py
@@ -28,9 +28,7 @@ def __init__(
self.storage_strategy = storage_strategy(testing=testing, **kwargs)

def __repr__(self):
return "<Datastore object using {}>".format(
self.storage_strategy.__class__.__name__
)
return f"<Datastore object using {self.storage_strategy.__class__.__name__}>"

def __getitem__(self, bucket_id: str) -> "Bucket":
# If this bucket doesn't have a initialized object, create it
@@ -41,9 +39,7 @@ def __getitem__(self, bucket_id: str) -> "Bucket":
self.bucket_instances[bucket_id] = bucket
else:
self.logger.error(
"Cannot create a Bucket object for {} because it doesn't exist in the database".format(
bucket_id
)
f"Cannot create a Bucket object for {bucket_id} because it doesn't exist in the database"
)
raise KeyError

@@ -55,10 +51,11 @@ def create_bucket(
type: str,
client: str,
hostname: str,
created: datetime = datetime.now(timezone.utc),
created: Optional[datetime] = None,
name: Optional[str] = None,
data: Optional[dict] = None,
) -> "Bucket":
created = created or datetime.now(timezone.utc)
self.logger.info(f"Creating bucket '{bucket_id}'")
self.storage_strategy.create_bucket(
bucket_id, type, client, hostname, created.isoformat(), name=name, data=data
@@ -106,8 +103,8 @@ def get(
milliseconds = 1 + int(endtime.microsecond / 1000)
second_offset = int(milliseconds / 1000) # usually 0, rarely 1
microseconds = (
1000 * milliseconds
) % 1000000 # will likely just be 1000 * milliseconds, if it overflows it would become zero
(1000 * milliseconds) % 1000000
) # will likely just be 1000 * milliseconds, if it overflows it would become zero
endtime = endtime.replace(microsecond=microseconds) + timedelta(
seconds=second_offset
)
@@ -153,9 +150,7 @@ def insert(self, events: Union[Event, List[Event]]) -> Optional[Event]:
oldest_event: Optional[Event] = events
if events.timestamp + events.duration > now:
self.logger.warning(
"Event inserted into bucket {} reaches into the future. Current UTC time: {}. Event data: {}".format(
self.bucket_id, str(now), str(events)
)
f"Event inserted into bucket {self.bucket_id} reaches into the future. Current UTC time: {str(now)}. Event data: {str(events)}"
)
inserted = self.ds.storage_strategy.insert_one(self.bucket_id, events)
# assert inserted
@@ -167,9 +162,7 @@ def insert(self, events: Union[Event, List[Event]]) -> Optional[Event]:
for event in events:
if event.timestamp + event.duration > now:
self.logger.warning(
"Event inserted into bucket {} reaches into the future. Current UTC time: {}. Event data: {}".format(
self.bucket_id, str(now), str(event)
)
f"Event inserted into bucket {self.bucket_id} reaches into the future. Current UTC time: {str(now)}. Event data: {str(event)}"
)
self.ds.storage_strategy.insert_many(self.bucket_id, events)
else:
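The `created: datetime = datetime.now(timezone.utc)` default replaced in create_bucket has the same define-time-evaluation problem as the mutable default in models.py: the timestamp is computed once when the function is defined (i.e. at import), not per call. A standalone illustration of why the Optional[datetime] = None pattern is used:

    import time
    from datetime import datetime, timezone
    from typing import Optional

    # Default is evaluated once, when the function is defined.
    def stamp_bad(created: datetime = datetime.now(timezone.utc)) -> datetime:
        return created

    # Sentinel pattern (as in create_bucket above): compute the timestamp on every call.
    def stamp_good(created: Optional[datetime] = None) -> datetime:
        return created or datetime.now(timezone.utc)

    first = stamp_bad()
    first_good = stamp_good()
    time.sleep(0.01)
    assert stamp_bad() == first        # frozen at definition time
    assert stamp_good() > first_good   # fresh timestamp on each call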
6 changes: 3 additions & 3 deletions aw_datastore/storages/memory.py
@@ -64,15 +64,15 @@ def update_bucket(
if data:
self._metadata[bucket_id]["data"] = data
else:
raise Exception("Bucket did not exist, could not update")
raise ValueError("Bucket did not exist, could not update")

def delete_bucket(self, bucket_id: str) -> None:
if bucket_id in self.db:
del self.db[bucket_id]
if bucket_id in self._metadata:
del self._metadata[bucket_id]
else:
raise Exception("Bucket did not exist, could not delete")
raise ValueError("Bucket did not exist, could not delete")

def buckets(self):
buckets = dict()
@@ -134,7 +134,7 @@ def get_metadata(self, bucket_id: str):
if bucket_id in self._metadata:
return self._metadata[bucket_id]
else:
raise Exception("Bucket did not exist, could not get metadata")
raise ValueError("Bucket did not exist, could not get metadata")

def insert_one(self, bucket: str, event: Event) -> Event:
if event.id is not None:
6 changes: 3 additions & 3 deletions aw_datastore/storages/peewee.py
@@ -215,7 +215,7 @@ def update_bucket(

bucket.save()
else:
raise Exception("Bucket did not exist, could not update")
raise ValueError("Bucket did not exist, could not update")

def delete_bucket(self, bucket_id: str) -> None:
if bucket_id in self.bucket_keys:
@@ -227,7 +227,7 @@ def delete_bucket(self, bucket_id: str) -> None:
).execute()
self.update_bucket_keys()
else:
raise Exception("Bucket did not exist, could not delete")
raise ValueError("Bucket did not exist, could not delete")

def get_metadata(self, bucket_id: str):
if bucket_id in self.bucket_keys:
Expand All @@ -236,7 +236,7 @@ def get_metadata(self, bucket_id: str):
).json()
return bucket
else:
raise Exception("Bucket did not exist, could not get metadata")
raise ValueError("Bucket did not exist, could not get metadata")

def insert_one(self, bucket_id: str, event: Event) -> Event:
e = EventModel.from_event(self.bucket_keys[bucket_id], event)
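Swapping the bare Exception for ValueError in memory.py and peewee.py lets callers handle the missing-bucket case specifically instead of catching everything. A hedged caller-side sketch — delete_bucket_if_exists is a hypothetical helper; only delete_bucket(bucket_id) and the new ValueError come from the diff:

    # Hypothetical helper; storage is any AbstractStorage implementation.
    def delete_bucket_if_exists(storage, bucket_id: str) -> bool:
        try:
            storage.delete_bucket(bucket_id)  # now raises ValueError when the bucket doesn't exist
            return True
        except ValueError:
            return False  # missing bucket: nothing to delete
        # other exceptions still propagate instead of being swallowed by a broad `except Exception`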
(diffs for the remaining 9 changed files not loaded)
