Commit
Added a pydantic Dataset model and fixed Tanuki#85
NotAdityaPawar committed Nov 24, 2023
1 parent eec6332 commit 1c4ab0a
Showing 1 changed file with 15 additions and 2 deletions.
17 changes: 15 additions & 2 deletions src/monkey_patch/trackers/buffered_logger.py
@@ -2,6 +2,7 @@
 import os
 
 from appdirs import user_data_dir
+from pydantic import BaseModel
 
 from monkey_patch.bloom_filter import BloomFilter, optimal_bloom_filter_params
 from monkey_patch.trackers.dataset_worker import DatasetWorker
@@ -14,6 +15,12 @@
 LIB_NAME = "monkey-patch"
 ENVVAR = "MONKEY_PATCH_LOG_DIR"
 
+# -- Dataset Length --
+
+class Dataset(BaseModel):
+    alignments: str
+    patches: str
+
 class BufferedLogger(DatasetWorker):
     def __init__(self, name, level=15):
         self.buffers = {}
@@ -115,7 +122,10 @@ def _load_existing_datasets(self):
             # discard all .json files
             files = [x for x in files if ".json" not in x]
         except Exception as e:
-            return dataset_lengths
+            try:
+                return Dataset(**dataset_lengths)
+            except Exception as err:
+                print(err.errors())
 
         for file in files:
             if ALIGN_FILE_EXTENSION not in file and PATCH_FILE_EXTENSION not in file:
@@ -126,7 +136,10 @@ def _load_existing_datasets(self):
                 dataset_type = "patches"
             func_hash = file.replace(ALIGN_FILE_EXTENSION, "").replace(PATCH_FILE_EXTENSION, "")
             dataset_lengths[dataset_type][func_hash] = -1
-        return dataset_lengths
+        try:
+            return Dataset(**dataset_lengths)
+        except Exception as err:
+            print(err.errors())
 
 
     def log_align(self, func_hash, *args, **kws):
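Note on the change above: `_load_existing_datasets` builds `dataset_lengths` as nested dicts ({"alignments": {func_hash: length}, "patches": {...}}), while the new `Dataset` model declares both fields as `str`, so `Dataset(**dataset_lengths)` will fail validation; also, `err.errors()` exists on pydantic's `ValidationError`, not on a generic `Exception`. Below is a minimal sketch of how the model could be typed so the returned structure validates — the `Dict[str, int]` field types and the explicit `ValidationError` handling are suggestions, not part of this commit.

# Sketch only (assumed typing, not from the commit): match the nested
# {"alignments": {func_hash: int}, "patches": {func_hash: int}} shape that
# _load_existing_datasets builds, and catch pydantic's ValidationError,
# which is the exception that actually provides .errors().
from typing import Dict

from pydantic import BaseModel, ValidationError


class Dataset(BaseModel):
    alignments: Dict[str, int] = {}
    patches: Dict[str, int] = {}


dataset_lengths = {"alignments": {"abc123": -1}, "patches": {}}

try:
    dataset = Dataset(**dataset_lengths)
    print(dataset.alignments)  # {'abc123': -1}
except ValidationError as err:
    print(err.errors())

If a `Dataset` instance is returned instead of a plain dict, callers that index into the result (e.g. `dataset_lengths["alignments"]`) would likely also need updating to attribute access.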
