From 300861bc7149f28c9c5906ae7411900e7ca96066 Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Tue, 28 May 2024 12:00:15 +0200
Subject: [PATCH 001/313] add Request.deposit_result

---
 .../syft/src/syft/service/request/request.py  | 90 ++++++++++++++++++-
 1 file changed, 87 insertions(+), 3 deletions(-)

diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py
index 3afbafee914..882bde0027a 100644
--- a/packages/syft/src/syft/service/request/request.py
+++ b/packages/syft/src/syft/service/request/request.py
@@ -523,7 +523,7 @@ def codes(self) -> Any:
         )
 
     @property
-    def code(self) -> Any:
+    def code(self) -> UserCode | SyftError:
         for change in self.changes:
             if isinstance(change, UserCodeStatusChange):
                 return change.code
@@ -707,8 +707,6 @@ def _get_latest_or_create_job(self) -> Job | SyftError:
             print("Creating job for existing user code")
             job = job_service.create_job_for_user_code_id(self.code.id)
         else:
-            print("returning existing job")
-            print("setting permission")
            job = existing_jobs[-1]
            res = job_service.add_read_permission_job_for_code_owner(job, self.code)
            print(res)
@@ -729,6 +727,92 @@ def _get_job_from_action_object(self, action_object: ActionObject) -> Job | None
             if job.result and job.result.id == action_object.id:
                 return job
 
+    def deposit_result(self, result: Any, logs: str | None = None) -> Job | SyftError:
+        """
+        Adds a result to this Request:
+        - Create an ActionObject from the result (if not already an ActionObject)
+        - Ensure ActionObject exists on this node
+        - Create Job with new result and logs
+        - Update the output history
+
+        Args:
+            result (Any): ActionObject or any object to be saved as an ActionObject.
+            logs (str | None, optional): Optional logs to be saved with the Job. Defaults to None.
+
+        Returns:
+            Job | SyftError: Job object if successful, else SyftError.
+        """
+        api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key)
+        code = self.code
+        if isinstance(code, SyftError):
+            return code
+
+        # Create ActionObject from result
+        if isinstance(result, ActionObject):
+            existing_job = self._get_job_from_action_object(result)
+            if existing_job is not None:
+                return SyftError(
+                    message=f"This ActionObject is already the result of Job {existing_job.id}"
+                )
+            action_object = result
+        else:
+            action_object = ActionObject.from_obj(
+                result,
+                syft_client_verify_key=api.signing_key.verify_key,
+                syft_node_location=api.node_uid,
+            )
+
+        # Ensure ActionObject exists on this node
+        action_object_is_from_this_node = isinstance(
+            api.services.action.exists(action_object.id.id), SyftSuccess
+        )
+        if (
+            action_object.syft_blob_storage_entry_id is None
+            or not action_object_is_from_this_node
+        ):
+            # TODO refactor ActionObject.send and use here
+            action_object.reload_cache()
+            action_object.syft_node_location = self.syft_node_location
+            action_object.syft_client_verify_key = self.syft_client_verify_key
+            blob_store_result = action_object._save_to_blob_storage()
+            if isinstance(blob_store_result, SyftError):
+                return blob_store_result
+            result = api.services.action.set(action_object)
+            if isinstance(result, SyftError):
+                return result
+
+        # Create Job with new result and logs
+        job = api.services.job.create_job_for_user_code_id(
+            code.id,
+            result=action_object,
+            logs_str=logs,
+        )
+        if isinstance(job, SyftError):
+            return job
+
+        job.result = action_object
+        if logs is not None:
+            api.services.log.append(job.log_id, new_str=logs)
+        res = api.services.job.update(job)
+        if isinstance(res, SyftError):
+            return res
+
+        # Update the output history
+        input_ids = {}
+        if code.input_policy is not None:
+            for inps in code.input_policy.inputs.values():
+                input_ids.update(inps)
+        res = api.services.code.store_as_history(
+            user_code_id=code.id,
+            outputs=result,
+            job_id=job.id,
+            input_ids=input_ids,
+        )
+        if isinstance(res, SyftError):
+            return res
+
+        return job
+
     def accept_by_depositing_result(
         self, result: Any, force: bool = False
     ) -> SyftError | SyftSuccess:

From 5986873825a8ce651f2f24b520abb5f0fa8078cc Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Tue, 28 May 2024 12:04:13 +0200
Subject: [PATCH 002/313] fix create_job

---
 packages/syft/src/syft/service/request/request.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py
index 882bde0027a..5f83285fd8a 100644
--- a/packages/syft/src/syft/service/request/request.py
+++ b/packages/syft/src/syft/service/request/request.py
@@ -782,11 +782,7 @@ def deposit_result(self, result: Any, logs: str | None = None) -> Job | SyftErro
             return result
 
         # Create Job with new result and logs
-        job = api.services.job.create_job_for_user_code_id(
-            code.id,
-            result=action_object,
-            logs_str=logs,
-        )
+        job = api.services.job.create_job_for_user_code_id(code.id)
         if isinstance(job, SyftError):
             return job
From 92f10079fd04602f6c08b4734f28555ac1b8623e Mon Sep 17 00:00:00 2001
From: khoaguin
Date: Wed, 29 May 2024 09:48:49 +0700
Subject: [PATCH 003/313] [bugfix] add mock execution permissions check for
 `Job.wait()`

---
 packages/syft/src/syft/service/action/action_object.py   | 2 +-
 packages/syft/src/syft/service/code/user_code_service.py | 5 +++++
 packages/syft/src/syft/service/job/job_stash.py          | 7 ++++++-
 3 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py
index 3cfe5fe2617..73e84331bde 100644
--- a/packages/syft/src/syft/service/action/action_object.py
+++ b/packages/syft/src/syft/service/action/action_object.py
@@ -1388,7 +1388,7 @@ def remove_trace_hook(cls) -> bool:
     def as_empty_data(self) -> ActionDataEmpty:
         return ActionDataEmpty(syft_internal_type=self.syft_internal_type)
 
-    def wait(self, timeout: int | None = None) -> ActionObject:
+    def wait(self, timeout: int | None = None) -> ActionObject | SyftError:
         # relative
         from ...client.api import APIRegistry
 
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index 78a7c1c3170..a281ce12bfa 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -350,6 +350,11 @@ def is_execution_allowed(
         else:
             return True
 
+    @service_method(
+        path="code.is_execution_on_owned_args_allowed",
+        name="is_execution_on_owned_args_allowed",
+        roles=GUEST_ROLE_LEVEL,
+    )
     def is_execution_on_owned_args_allowed(
         self, context: AuthedServiceContext
     ) -> bool | SyftError:
diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py
index 2943913cf73..801529658c1 100644
--- a/packages/syft/src/syft/service/job/job_stash.py
+++ b/packages/syft/src/syft/service/job/job_stash.py
@@ -642,7 +642,7 @@ def _repr_html_(self) -> str:
 
     def wait(
         self, job_only: bool = False, timeout: int | None = None
-    ) -> Any | SyftNotReady:
+    ) -> Any | SyftNotReady | SyftError:
         # stdlib
         from time import sleep
 
@@ -653,6 +653,11 @@ def wait(
         if self.resolved:
             return self.resolve
 
+        if not api.services.code.is_execution_on_owned_args_allowed():
+            return SyftError(
+                message="You do not have the permissions for mock execution, please contact the admin."
+            )
+
         if not job_only and self.result is not None:
             self.result.wait(timeout)
 

From 4ec0ed05f2da8938d6cb324e257d0dc94da4dfe6 Mon Sep 17 00:00:00 2001
From: khoaguin
Date: Wed, 29 May 2024 10:42:02 +0700
Subject: [PATCH 004/313] [syft/job] fix linting

---
 packages/syft/src/syft/service/job/job_stash.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py
index 801529658c1..5800d5b57a0 100644
--- a/packages/syft/src/syft/service/job/job_stash.py
+++ b/packages/syft/src/syft/service/job/job_stash.py
@@ -650,6 +650,12 @@ def wait(
             node_uid=self.syft_node_location,
             user_verify_key=self.syft_client_verify_key,
         )
+
+        if api is None:
+            raise ValueError(
+                f"Can't access Syft API. You must login to node with id '{self.syft_node_location}'"
+            )
+
         if self.resolved:
             return self.resolve
 
@@ -661,10 +667,6 @@ def wait(
         if not job_only and self.result is not None:
             self.result.wait(timeout)
 
-        if api is None:
-            raise ValueError(
-                f"Can't access Syft API. You must login to {self.syft_node_location}"
-            )
         print_warning = True
         counter = 0
         while True:
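Note: with the permission check above, `Job.wait()` can now return a `SyftError` instead of polling forever; a hypothetical data-scientist-side flow (client and function names are illustrative):

    job = ds_client.code.my_analysis(data=asset, blocking=False)
    result = job.wait(timeout=60)
    if isinstance(result, sy.SyftError):
        print(result.message)    # e.g. missing mock execution permission
    else:
        print(result.resolve)    # the resolved output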
From 85ad49e9d4a8f8f7c37caeeaa3ff90abe708a155 Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Wed, 29 May 2024 12:59:45 +0200
Subject: [PATCH 005/313] cleanup

---
 .../syft/src/syft/client/domain_client.py     |  23 +---
 .../src/syft/service/action/action_object.py  |  25 +++-
 .../syft/src/syft/service/code/user_code.py   |   2 +-
 .../syft/service/code/user_code_service.py    |  10 +-
 .../syft/src/syft/service/job/job_service.py  |  33 +++--
 .../syft/src/syft/service/log/log_service.py  |   9 +-
 .../syft/src/syft/service/request/request.py  | 127 ++++++++++++------
 packages/syft/src/syft/types/syft_object.py   |  11 ++
 8 files changed, 150 insertions(+), 90 deletions(-)

diff --git a/packages/syft/src/syft/client/domain_client.py b/packages/syft/src/syft/client/domain_client.py
index 8f1e7cb9dc8..e2a3c91cca6 100644
--- a/packages/syft/src/syft/client/domain_client.py
+++ b/packages/syft/src/syft/client/domain_client.py
@@ -147,27 +147,6 @@ def upload_dataset(self, dataset: CreateDataset) -> SyftSuccess | SyftError:
             return valid
         return self.api.services.dataset.add(dataset=dataset)
 
-    # def get_permissions_for_other_node(
-    #     self,
-    #     items: list[Union[ActionObject, SyftObject]],
-    # ) -> dict:
-    #     if len(items) > 0:
-    #         if not len({i.syft_node_location for i in items}) == 1 or (
-    #             not len({i.syft_client_verify_key for i in items}) == 1
-    #         ):
-    #             raise ValueError("permissions from different nodes")
-    #         item = items[0]
-    #         api = APIRegistry.api_for(
-    #             item.syft_node_location, item.syft_client_verify_key
-    #         )
-    #         if api is None:
-    #             raise ValueError(
-    #                 f"Can't access the api. Please log in to {item.syft_node_location}"
-    #             )
-    #         return api.services.sync.get_permissions(items)
-    #     else:
-    #         return {}
-
     def refresh(self) -> None:
         if self.credentials:
             self._fetch_node_metadata(self.credentials)
@@ -196,7 +175,7 @@ def apply_state(self, resolved_state: ResolvedSyncState) -> SyftSuccess | SyftEr
 
         for action_object in action_objects:
             # NOTE permissions are added separately server side
-            action_object._send(self, add_storage_permission=False)
+            action_object._send(self.id, self.verify_key, add_storage_permission=False)
 
         ignored_batches = resolved_state.ignored_batches
 
diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py
index 3cfe5fe2617..cf9e142455f 100644
--- a/packages/syft/src/syft/service/action/action_object.py
+++ b/packages/syft/src/syft/service/action/action_object.py
@@ -1192,13 +1192,26 @@ def wrapper(
         return wrapper
 
     def send(self, client: SyftClient) -> Any:
-        return self._send(client, add_storage_permission=True)
+        return self._send(
+            node_uid=client.id,
+            verify_key=client.verify_key,
+            add_storage_permission=True,
+        )
+
+    def _send(
+        self,
+        node_uid: UID,
+        verify_key: SyftVerifyKey,
+        add_storage_permission: bool = True,
+    ) -> Self:
+        self._set_obj_location_(node_uid, verify_key)
+
+        blob_storage_res = self._save_to_blob_storage()
+        if isinstance(blob_storage_res, SyftError):
+            return blob_storage_res
 
-    def _send(self, client: SyftClient, add_storage_permission: bool = True) -> Self:
-        """Send the object to a Syft Client"""
-        self._set_obj_location_(client.id, client.verify_key)
-        self._save_to_blob_storage()
-        res = client.api.services.action.set(
+        api = APIRegistry.api_for(node_uid, verify_key)
+        res = api.services.action.set(
             self, add_storage_permission=add_storage_permission
         )
         if isinstance(res, ActionObject):
diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py
index 2cbeaf31967..19784c4f12e 100644
--- a/packages/syft/src/syft/service/code/user_code.py
+++ b/packages/syft/src/syft/service/code/user_code.py
@@ -563,7 +563,7 @@ def get_output_history(
         output_service = cast(OutputService, context.node.get_service("outputservice"))
         return output_service.get_by_user_code_id(context, self.id)
 
-    def store_as_history(
+    def store_execution_output(
         self,
         context: AuthedServiceContext,
         outputs: Any,
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index 78a7c1c3170..bfa93a32beb 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -541,7 +541,7 @@ def _call(
         # this currently only works for nested syft_functions
         # and admins executing on high side (TODO, decide if we want to increment counter)
         if not skip_fill_cache and output_policy is not None:
-            res = code.store_as_history(
+            res = code.store_execution_output(
                 context=context,
                 outputs=result,
                 job_id=context.job_id,
@@ -596,9 +596,11 @@ def has_code_permission(
         return SyftSuccess(message="you have permission")
 
     @service_method(
-        path="code.store_as_history", name="store_as_history", roles=GUEST_ROLE_LEVEL
+        path="code.store_execution_output",
+        name="store_execution_output",
+        roles=GUEST_ROLE_LEVEL,
     )
-    def store_as_history(
+    def store_execution_output(
         self,
         context: AuthedServiceContext,
         user_code_id: UID,
@@ -614,7 +616,7 @@ def store_execution_output(
         if not code.get_status(context).approved:
             return SyftError(message="Code is not approved")
 
-        res = code.store_as_history(
+        res = code.store_execution_output(
             context=context,
             outputs=outputs,
             job_id=job_id,
diff --git a/packages/syft/src/syft/service/job/job_service.py b/packages/syft/src/syft/service/job/job_service.py
index 323dff99ae9..c63d3d9786d 100644
--- a/packages/syft/src/syft/service/job/job_service.py
+++ b/packages/syft/src/syft/service/job/job_service.py
@@ -11,6 +11,7 @@
 from ...store.document_store import DocumentStore
 from ...types.uid import UID
 from ...util.telemetry import instrument
+from ..action.action_object import ActionObject
 from ..action.action_permissions import ActionObjectPermission
 from ..action.action_permissions import ActionPermission
 from ..code.user_code import UserCode
@@ -304,13 +305,21 @@ def add_read_permission_log_for_code_owner(
         roles=DATA_OWNER_ROLE_LEVEL,
     )
     def create_job_for_user_code_id(
-        self, context: AuthedServiceContext, user_code_id: UID
+        self,
+        context: AuthedServiceContext,
+        user_code_id: UID,
+        result: ActionObject | None = None,
+        log_stdout: str | None = None,
+        log_stderr: str | None = None,
+        status: JobStatus = JobStatus.CREATED,
+        add_code_owner_read_permissions: bool = True,
     ) -> Job | SyftError:
         job = Job(
             id=UID(),
             node_uid=context.node.id,
             action=None,
-            result_id=None,
+            result=result,
+            status=status,
             parent_id=None,
             log_id=UID(),
             job_pid=None,
@@ -323,19 +332,21 @@ def create_job_for_user_code_id(
         # The owner of the code should be able to read the job
         self.stash.set(context.credentials, job)
-        self.add_read_permission_job_for_code_owner(context, job, user_code)
 
         log_service = context.node.get_service("logservice")
-        res = log_service.add(context, job.log_id, job.id)
+        res = log_service.add(
+            context,
+            job.log_id,
+            job.id,
+            stdout=log_stdout,
+            stderr=log_stderr,
+        )
         if isinstance(res, SyftError):
             return res
-        # The owner of the code should be able to read the job log
-        self.add_read_permission_log_for_code_owner(context, job.log_id, user_code)
-        # log_service.stash.add_permission(
-        #     ActionObjectPermission(
-        #         job.log_id, ActionPermission.READ, user_code.user_verify_key
-        #     )
-        # )
+
+        if add_code_owner_read_permissions:
+            self.add_read_permission_job_for_code_owner(context, job, user_code)
+            self.add_read_permission_log_for_code_owner(context, job.log_id, user_code)
 
         return job
 
diff --git a/packages/syft/src/syft/service/log/log_service.py b/packages/syft/src/syft/service/log/log_service.py
index 5174bf11809..a2f68c3ffad 100644
--- a/packages/syft/src/syft/service/log/log_service.py
+++ b/packages/syft/src/syft/service/log/log_service.py
@@ -33,9 +33,14 @@ def __init__(self, store: DocumentStore) -> None:
 
     @service_method(path="log.add", name="add", roles=DATA_SCIENTIST_ROLE_LEVEL)
     def add(
-        self, context: AuthedServiceContext, uid: UID, job_id: UID
+        self,
+        context: AuthedServiceContext,
+        uid: UID,
+        job_id: UID,
+        stdout: str = "",
+        stderr: str = "",
     ) -> SyftSuccess | SyftError:
-        new_log = SyftLog(id=uid, job_id=job_id)
+        new_log = SyftLog(id=uid, job_id=job_id, stdout=stdout, stderr=stderr)
         result = self.stash.set(context.credentials, new_log)
         if result.is_err():
             return SyftError(message=str(result.err()))
diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py
index 5f83285fd8a..e09fbcd73bb 100644
--- a/packages/syft/src/syft/service/request/request.py
+++ b/packages/syft/src/syft/service/request/request.py
@@ -37,6 +37,7 @@
 from ...types.uid import UID
 from ...util import options
 from ...util.colors import SURFACE
+from ...util.decorators import deprecated
 from ...util.markdown import markdown_as_class_with_fields
 from ...util.notebook_ui.icons import Icon
 from ...util.util import prompt_warning_message
@@ -427,7 +428,7 @@ def _repr_html_(self) -> Any:
         if self.code and len(self.code.output_readers) > 0:
             # owner_names = ["canada", "US"]
             owners_string = " and ".join(
-                [f"{x}" for x in self.code.output_reader_names]
+                [f"{x}" for x in self.code.output_reader_names]  # type: ignore
             )
             shared_with_line += (
                 f" Custom Policy: "
@@ -727,27 +728,15 @@ def _get_job_from_action_object(self, action_object: ActionObject) -> Job | None
             if job.result and job.result.id == action_object.id:
                 return job
 
-    def deposit_result(self, result: Any, logs: str | None = None) -> Job | SyftError:
-        """
-        Adds a result to this Request:
-        - Create an ActionObject from the result (if not already an ActionObject)
-        - Ensure ActionObject exists on this node
-        - Create Job with new result and logs
-        - Update the output history
-
-        Args:
-            result (Any): ActionObject or any object to be saved as an ActionObject.
-            logs (str | None, optional): Optional logs to be saved with the Job. Defaults to None.
-
-        Returns:
-            Job | SyftError: Job object if successful, else SyftError.
-        """
-        api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key)
-        code = self.code
-        if isinstance(code, SyftError):
-            return code
+    def _create_action_object_for_deposited_result(
+        self,
+        result: Any,
+    ) -> ActionObject | SyftError:
+        api = self._get_api()
+        if isinstance(api, SyftError):
+            return api
 
-        # Create ActionObject from result
+        # Ensure result is an ActionObject
         if isinstance(result, ActionObject):
             existing_job = self._get_job_from_action_object(result)
             if existing_job is not None:
                 return SyftError(
                     message=f"This ActionObject is already the result of Job {existing_job.id}"
                 )
             action_object = result
         else:
             action_object = ActionObject.from_obj(
                 result,
                 syft_client_verify_key=api.signing_key.verify_key,
                 syft_node_location=api.node_uid,
             )
 
             action_object.syft_blob_storage_entry_id is None
             or not action_object_is_from_this_node
         ):
-            # TODO refactor ActionObject.send and use here
             action_object.reload_cache()
-            action_object.syft_node_location = self.syft_node_location
-            action_object.syft_client_verify_key = self.syft_client_verify_key
-            blob_store_result = action_object._save_to_blob_storage()
-            if isinstance(blob_store_result, SyftError):
-                return blob_store_result
-            result = api.services.action.set(action_object)
+            result = action_object._send(self.node_uid, self.syft_client_verify_key)
             if isinstance(result, SyftError):
                 return result
 
-        # Create Job with new result and logs
-        job = api.services.job.create_job_for_user_code_id(code.id)
-        if isinstance(job, SyftError):
-            return job
+        return action_object
 
-        job.result = action_object
-        if logs is not None:
-            api.services.log.append(job.log_id, new_str=logs)
-        res = api.services.job.update(job)
-        if isinstance(res, SyftError):
-            return res
+    def _create_output_history_for_deposited_result(
+        self, job: Job, result: Any
+    ) -> SyftSuccess | SyftError:
+        code = self.code
+        if isinstance(code, SyftError):
+            return code
+        api = self._get_api()
+        if isinstance(api, SyftError):
+            return api
 
-        # Update the output history
         input_ids = {}
-        if code.input_policy is not None:
-            for inps in code.input_policy.inputs.values():
-                input_ids.update(inps)
-        res = api.services.code.store_as_history(
+        input_policy = code.input_policy
+        if input_policy is not None:
+            for input_ in input_policy.inputs.values():
+                input_ids.update(input_)
+        res = api.services.code.store_execution_output(
             user_code_id=code.id,
             outputs=result,
             job_id=job.id,
             input_ids=input_ids,
         )
+
+        return res
+
+    def deposit_result(
+        self,
+        result: Any,
+        logs: str | None = None,
+        add_code_owner_read_permissions: bool = True,
+    ) -> Job | SyftError:
+        """
+        Adds a result to this Request:
+        - Create an ActionObject from the result (if not already an ActionObject)
+        - Ensure ActionObject exists on this node
+        - Create Job with new result and logs
+        - Update the output history
+
+        Args:
+            result (Any): ActionObject or any object to be saved as an ActionObject.
+            logs (str | None, optional): Optional logs to be saved with the Job. Defaults to None.
+
+        Returns:
+            Job | SyftError: Job object if successful, else SyftError.
+        """
+
+        # TODO check if this is a low-side request. If not, SyftError
+
+        api = self._get_api()
+        if isinstance(api, SyftError):
+            return api
+        code = self.code
+        if isinstance(code, SyftError):
+            return code
+
+        # Create ActionObject
+        action_object = self._create_action_object_for_deposited_result(result)
+        if isinstance(action_object, SyftError):
+            return action_object
+
+        # Create Job
+        job = api.services.job.create_job_for_user_code_id(
+            code.id,
+            result=action_object,
+            log_stdout=logs,
+            status=JobStatus.COMPLETED,
+            add_code_owner_read_permissions=add_code_owner_read_permissions,
+        )
+        if isinstance(job, SyftError):
+            return job
+
+        # Add to output history
+        res = self._create_output_history_for_deposited_result(job, result)
         if isinstance(res, SyftError):
             return res
 
         return job
 
+    @deprecated(
+        return_syfterror=True,
+        reason="accept_by_depositing_result has been removed. Use approve instead to "
+        "approve this request, or deposit_result to deposit a new result.",
+    )
     def accept_by_depositing_result(
         self, result: Any, force: bool = False
     ) -> SyftError | SyftSuccess:
@@ -984,7 +1023,7 @@ def accept_by_depositing_result(
             for inps in code.input_policy.inputs.values():
                 input_ids.update(inps)
 
-        res = api.services.code.store_as_history(
+        res = api.services.code.store_execution_output(
             user_code_id=code.id,
             outputs=result,
             job_id=job.id,
diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py
index 3ec9c073165..88ba6587889 100644
--- a/packages/syft/src/syft/types/syft_object.py
+++ b/packages/syft/src/syft/types/syft_object.py
@@ -49,6 +49,8 @@
 
 if TYPE_CHECKING:
     # relative
+    from ..client.api import SyftAPI
+    from ..service.response import SyftError
     from ..service.sync.diff_state import AttrDiff
 
 IntStr = int | str
@@ -700,6 +702,15 @@ def syft_get_diffs(self, ext_obj: Self) -> list["AttrDiff"]:
                     diff_attrs.append(diff_attr)
         return diff_attrs
 
+    def _get_api(self) -> SyftAPI | SyftError:
+        # relative
+        from ..client.api import APIRegistry
+
+        api = APIRegistry.api_for(self.syft_node_location, self.syft_client_verify_key)
+        if api is None:
+            return SyftError(f"Can't access the api. You must login to {self.node_uid}")
+        return api
+
     ## OVERRIDING pydantic.BaseModel.__getattr__
     ## return super().__getattribute__(item) -> return self.__getattribute__(item)
     ## so that ActionObject.__getattribute__ works properly,
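Note: the send/_send split above keeps the public upload path unchanged while letting server-side code send an object given only a node UID and verify key; a short sketch of both call sites, taken directly from the signatures in this patch (the client fixture is illustrative):

    obj = sy.ActionObject.from_obj([1, 2, 3])
    pointer = obj.send(client)    # client-side, as before
    # sync/server-side, without a SyftClient object:
    pointer = obj._send(client.id, client.verify_key, add_storage_permission=False)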
From 41759cf7db2efcf328b3a3285f2232ad3a27513b Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Wed, 29 May 2024 16:38:02 +0200
Subject: [PATCH 006/313] fix store executionoutput

---
 .../syft/src/syft/service/code/user_code.py   | 19 ++++++++++++++++---
 .../syft/service/code/user_code_service.py    |  5 +++--
 .../syft/src/syft/service/job/job_service.py  |  4 ++--
 .../syft/src/syft/service/request/request.py  | 13 ++++++-------
 packages/syft/src/syft/types/syft_object.py   |  2 +-
 5 files changed, 28 insertions(+), 15 deletions(-)

diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py
index 19784c4f12e..0bcb6ce3aea 100644
--- a/packages/syft/src/syft/service/code/user_code.py
+++ b/packages/syft/src/syft/service/code/user_code.py
@@ -84,6 +84,7 @@
 from ..response import SyftSuccess
 from ..response import SyftWarning
 from ..user.user import UserView
+from ..user.user_roles import ServiceRole
 from .code_parse import GlobalsVisitor
 from .code_parse import LaunchJobVisitor
 from .unparse import unparse
@@ -528,6 +529,18 @@ def _get_output_policy(self) -> OutputPolicy | None:
             print(f"Failed to deserialize custom output policy state. {e}")
             return None
 
+    @property
+    def output_policy_id(self) -> UID | None:
+        if self.output_policy_init_kwargs is not None:
+            return self.output_policy_init_kwargs.get("id", None)
+        return None
+
+    @property
+    def input_policy_id(self) -> UID | None:
+        if self.input_policy_init_kwargs is not None:
+            return self.input_policy_init_kwargs.get("id", None)
+        return None
+
     @property
     def output_policy(self) -> OutputPolicy | None:  # type: ignore
         if not self.status.approved:
@@ -570,12 +583,12 @@ def store_execution_output(
         job_id: UID | None = None,
         input_ids: dict[str, UID] | None = None,
     ) -> ExecutionOutput | SyftError:
+        is_admin = context.role == ServiceRole.ADMIN
         output_policy = self.get_output_policy(context)
-        if output_policy is None:
+        if output_policy is None and not is_admin:
             return SyftError(
                 message="You must wait for the output policy to be approved"
             )
-
         output_ids = filter_only_uids(outputs)
 
         output_service = context.node.get_service("outputservice")
@@ -586,7 +599,7 @@ def store_execution_output(
             output_ids=output_ids,
             executing_user_verify_key=self.user_verify_key,
             job_id=job_id,
-            output_policy_id=output_policy.id,
+            output_policy_id=self.output_policy_id,
             input_ids=input_ids,
         )
         if isinstance(execution_result, SyftError):
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index bfa93a32beb..9f9c181266e 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -612,9 +612,10 @@ def store_execution_output(
         if code_result.is_err():
             return SyftError(message=code_result.err())
 
+        is_admin = context.role == ServiceRole.ADMIN
         code: UserCode = code_result.ok()
-        if not code.get_status(context).approved:
-            return SyftError(message="Code is not approved")
+        if not code.get_status(context).approved and not is_admin:
+            return SyftError(message="This UserCode is not approved")
 
         res = code.store_execution_output(
             context=context,
diff --git a/packages/syft/src/syft/service/job/job_service.py b/packages/syft/src/syft/service/job/job_service.py
index c63d3d9786d..654d83d3cc3 100644
--- a/packages/syft/src/syft/service/job/job_service.py
+++ b/packages/syft/src/syft/service/job/job_service.py
@@ -309,8 +309,8 @@ def create_job_for_user_code_id(
         context: AuthedServiceContext,
         user_code_id: UID,
         result: ActionObject | None = None,
-        log_stdout: str | None = None,
-        log_stderr: str | None = None,
+        log_stdout: str = "",
+        log_stderr: str = "",
         status: JobStatus = JobStatus.CREATED,
         add_code_owner_read_permissions: bool = True,
     ) -> Job | SyftError:
diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py
index e09fbcd73bb..0da2c5dbcc3 100644
--- a/packages/syft/src/syft/service/request/request.py
+++ b/packages/syft/src/syft/service/request/request.py
@@ -37,7 +37,6 @@
 from ...types.uid import UID
 from ...util import options
 from ...util.colors import SURFACE
-from ...util.decorators import deprecated
 from ...util.markdown import markdown_as_class_with_fields
 from ...util.notebook_ui.icons import Icon
 from ...util.util import prompt_warning_message
@@ -793,7 +792,7 @@ def _create_output_history_for_deposited_result(
     def deposit_result(
         self,
         result: Any,
-        logs: str | None = None,
+        logs: str = "",
         add_code_owner_read_permissions: bool = True,
     ) -> Job | SyftError:
         """
@@ -843,11 +842,11 @@ def deposit_result(
 
         return job
 
-    @deprecated(
-        return_syfterror=True,
-        reason="accept_by_depositing_result has been removed. Use approve instead to "
-        "approve this request, or deposit_result to deposit a new result.",
-    )
+    # @deprecated(
+    #     return_syfterror=True,
+    #     reason="accept_by_depositing_result has been removed. Use approve instead to "
+    #     "approve this request, or deposit_result to deposit a new result.",
+    # )
     def accept_by_depositing_result(
         self, result: Any, force: bool = False
     ) -> SyftError | SyftSuccess:
diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py
index 88ba6587889..4b9f5b6711b 100644
--- a/packages/syft/src/syft/types/syft_object.py
+++ b/packages/syft/src/syft/types/syft_object.py
@@ -702,7 +702,7 @@ def syft_get_diffs(self, ext_obj: Self) -> list["AttrDiff"]:
                     diff_attrs.append(diff_attr)
         return diff_attrs
 
-    def _get_api(self) -> SyftAPI | SyftError:
+    def _get_api(self) -> "SyftAPI | SyftError":
         # relative
         from ..client.api import APIRegistry
 
From a0b383ff407f0312d52b6284e18c47eeb6254be5 Mon Sep 17 00:00:00 2001
From: khoaguin
Date: Thu, 30 May 2024 11:12:24 +0700
Subject: [PATCH 007/313] [syft/job] debugging `Job.wait` and
 `ActionObject.wait`

---
 .../src/syft/service/action/action_object.py    | 17 +++++++++++------
 .../src/syft/service/action/action_service.py   | 15 +++++++++++----
 packages/syft/src/syft/service/job/job_stash.py |  7 +------
 3 files changed, 23 insertions(+), 16 deletions(-)

diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py
index 73e84331bde..29402d09d4b 100644
--- a/packages/syft/src/syft/service/action/action_object.py
+++ b/packages/syft/src/syft/service/action/action_object.py
@@ -1402,12 +1402,17 @@ def wait(self, timeout: int | None = None) -> ActionObject | SyftError:
         obj_id = self.id
 
         counter = 0
-        while api and not api.services.action.is_resolved(obj_id):
-            time.sleep(1)
-            if timeout is not None:
-                counter += 1
-                if counter > timeout:
-                    return SyftError(message="Reached Timeout!")
+        while api:
+            obj_resolved = api.services.action.is_resolved(obj_id)
+            print(f"inside ActionObject.wait. {obj_resolved = }. {type(obj_resolved)}")
+            if "You do not have the permissions for mock execution" in obj_resolved:
+                return SyftError(message=obj_resolved)
+            if not obj_resolved:
+                time.sleep(1)
+                if timeout is not None:
+                    counter += 1
+                    if counter > timeout:
+                        return SyftError(message="Reached Timeout!")
 
         return self
 
diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py
index 9c3808e54f1..9a3ba935113 100644
--- a/packages/syft/src/syft/service/action/action_service.py
+++ b/packages/syft/src/syft/service/action/action_service.py
@@ -139,8 +139,6 @@ def is_resolved(
         uid: UID,
     ) -> Result[Ok[bool], Err[str]]:
         """Get an object from the action store"""
-        # relative
-
         result = self._get(context, uid)
         if result.is_ok():
             obj = result.ok()
                 result = self.resolve_links(
                     context, obj.syft_action_data.action_object_id.id
                 )
-
+                print(f"inside ActionService.is_resolved (2). {result = }")
                 # Checking in case any error occurred
                 if result.is_err():
                     return result
@@ -157,11 +155,20 @@ def is_resolved(
 
             # If it's a leaf but not resolved yet, return false
             elif not obj.syft_resolved:
+                user_code_service = context.node.get_service("usercodeservice")
+                if not user_code_service.is_execution_on_owned_args_allowed(context):
+                    return Err(
+                        "You do not have the permissions for mock execution, please contact the admin"
+                    )
+                print(
+                    f"inside ActionService.is_resolved (3). {result = }. {obj.syft_resolved = }"
+                )
                 return Ok(False)
 
+            print(f"inside ActionService.is_resolved (4). {obj = }.")
             # If it's not an action data link or non resolved (empty). It's resolved
             return Ok(True)
-
+        print(f"inside ActionService.is_resolved (5). {result = }.")
         # If it's not in the store or permission error, return the error
         return result
 
diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py
index 5800d5b57a0..1a0e5cd2bb8 100644
--- a/packages/syft/src/syft/service/job/job_stash.py
+++ b/packages/syft/src/syft/service/job/job_stash.py
@@ -315,7 +315,7 @@ def fetch(self) -> None:
         )
         job: Job | None = api.make_call(call)
         if job is None:
-            return
+            return None
         self.resolved = job.resolved
         if job.resolved:
             self.result = job.result
@@ -659,11 +659,6 @@ def wait(
         if self.resolved:
             return self.resolve
 
-        if not api.services.code.is_execution_on_owned_args_allowed():
-            return SyftError(
-                message="You do not have the permissions for mock execution, please contact the admin."
-            )
-
         if not job_only and self.result is not None:
             self.result.wait(timeout)
 
From 4559d72827c512be9596311c681f8cc31f24f8ef Mon Sep 17 00:00:00 2001
From: Aziz Berkay Yesilyurt
Date: Thu, 30 May 2024 09:05:46 +0200
Subject: [PATCH 008/313] hacks until DS can read the results

---
 packages/syft/src/syft/client/syncing.py      |  22 +++-
 .../syft/src/syft/service/code/user_code.py   |   4 +-
 .../syft/service/code/user_code_service.py    | 100 +++++++++---------
 .../syft/src/syft/service/job/job_stash.py    |   5 +-
 .../src/syft/service/output/output_service.py |  19 ++++
 .../syft/src/syft/service/request/request.py  |  18 ++--
 .../src/syft/service/sync/resolve_widget.py   |   2 +-
 7 files changed, 104 insertions(+), 66 deletions(-)

diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py
index 428117634ef..d6d7a7179e2 100644
--- a/packages/syft/src/syft/client/syncing.py
+++ b/packages/syft/src/syft/client/syncing.py
@@ -3,6 +3,7 @@
 # relative
 from ..abstract_node import NodeSideType
 from ..node.credentials import SyftVerifyKey
+from ..service.job.job_stash import Job
 from ..service.response import SyftError
 from ..service.response import SyftSuccess
 from ..service.sync.diff_state import NodeDiff
@@ -62,9 +63,15 @@ def compare_clients(
     filter_by_email: str | None = None,
     filter_by_type: type | None = None,
 ) -> NodeDiff:
+    from_state = from_client.get_sync_state()
+    to_state = to_client.get_sync_state()
+    if not from_state:
+        return from_state
+    if not to_state:
+        return to_state
     return compare_states(
-        from_client.get_sync_state(),
-        to_client.get_sync_state(),
+        from_state=from_state,
+        to_state=to_state,
        include_ignored=include_ignored,
        include_same=include_same,
        filter_by_email=filter_by_email,
@@ -117,6 +124,15 @@ def handle_sync_batch(
 
     src_client = obj_diff_batch.source_client
     tgt_client = obj_diff_batch.target_client
+
+    # make sure dependent request is approved before syncing the job
+    if obj_diff_batch.root_type == Job and sync_direction == SyncDirection.HIGH_TO_LOW:
+        job = obj_diff_batch.root.get_obj()
+        requests = [r for r in src_client.requests if r.code_id == job.user_code_id]
+        # NOTE: how to handle 0 or multiple requests?
+        if requests:
+            requests[0].approve()
+
     src_resolved_state, tgt_resolved_state = obj_diff_batch.create_new_resolved_states()
 
     obj_diff_batch.decision = decision
@@ -130,7 +146,7 @@ def handle_sync_batch(
             getattr(obj_diff_batch.user_code_high, "user_verify_key", None)
             or obj_diff_batch.user_verify_key_high
         )
-        share_private_data_for_diff = share_private_data[diff.object_id]
+        share_private_data_for_diff = True
         mockify_for_diff = mockify[diff.object_id]
         instruction = SyncInstruction.from_batch_decision(
             diff=diff,
diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py
index 0bcb6ce3aea..67aaa158c05 100644
--- a/packages/syft/src/syft/service/code/user_code.py
+++ b/packages/syft/src/syft/service/code/user_code.py
@@ -568,7 +568,8 @@ def get_output_history(
     def get_output_history(
         self, context: AuthedServiceContext
     ) -> list[ExecutionOutput] | SyftError:
-        if not self.get_status(context).approved:
+        is_admin = context.role == ServiceRole.ADMIN
+        if not self.get_status(context).approved and False:
             return SyftError(
                 message="Execution denied, Please wait for the code to be approved"
             )
@@ -593,6 +594,7 @@ def store_execution_output(
         output_service = context.node.get_service("outputservice")
         output_service = cast(OutputService, output_service)
+        # use DS verify key here
         execution_result = output_service.create(
             context,
             user_code_id=self.id,
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index 9f9c181266e..c47ddb04771 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -1,7 +1,6 @@
 # stdlib
 from typing import Any
 from typing import TypeVar
-from typing import cast
 
 # third party
 from result import Err
@@ -336,7 +335,7 @@ def is_execution_allowed(
         context: AuthedServiceContext,
         output_policy: OutputPolicy | None,
     ) -> bool | SyftSuccess | SyftError | SyftNotReady:
-        if not code.get_status(context).approved:
+        if not code.get_status(context).approved and False:
             return code.status.get_status_message()
         # Check if the user has permission to execute the code.
         elif not (has_code_permission := self.has_code_permission(code, context)):
@@ -462,56 +461,57 @@ def _call(
             input_policy = code.get_input_policy(context)
 
             # Check output policy
-            output_policy = code.get_output_policy(context)
             if not override_execution_permission:
                 output_history = code.get_output_history(context=context)
                 if isinstance(output_history, SyftError):
                     return Err(output_history.message)
 
-                can_execute = self.is_execution_allowed(
+                output_policy = code.get_output_policy(context)
+
+                can_execute = output_policy and self.is_execution_allowed(
                     code=code,
                     context=context,
                     output_policy=output_policy,
                 )
                 if not can_execute:
-                    if not code.is_output_policy_approved(context):
-                        return Err(
-                            "Execution denied: Your code is waiting for approval"
-                        )
-                    if not (is_valid := output_policy._is_valid(context)):  # type: ignore
-                        if len(output_history) > 0 and not skip_read_cache:
-                            last_executed_output = output_history[-1]
-                            # Check if the inputs of the last executed output match
-                            # against the current input
-                            if (
-                                input_policy is not None
-                                and not last_executed_output.check_input_ids(
-                                    kwargs=kwarg2id
-                                )
-                            ):
-                                inp_policy_validation = input_policy._is_valid(
-                                    context,
-                                    usr_input_kwargs=kwarg2id,
-                                    code_item_id=code.id,
-                                )
-                                if inp_policy_validation.is_err():
-                                    return inp_policy_validation
-
-                            result: Result[ActionObject, str] = resolve_outputs(
-                                context=context,
-                                output_ids=last_executed_output.output_ids,
+                    # if not code.is_output_policy_approved(context):
+                    #     return Err(
+                    #         "Execution denied: Your code is waiting for approval"
+                    #     )
+                    # if not (is_valid := output_policy._is_valid(context)):  # type: ignore
+                    if len(output_history) > 0 and not skip_read_cache:
+                        last_executed_output = output_history[-1]
+                        # Check if the inputs of the last executed output match
+                        # against the current input
+                        if (
+                            input_policy is not None
+                            and not last_executed_output.check_input_ids(
+                                kwargs=kwarg2id
                             )
-                            if result.is_err():
-                                return result
-
-                            res = delist_if_single(result.ok())
-                            return Ok(
-                                CachedSyftObject(
-                                    result=res,
-                                    error_msg=is_valid.message,
-                                )
+                        ):
+                            inp_policy_validation = input_policy._is_valid(
+                                context,
+                                usr_input_kwargs=kwarg2id,
+                                code_item_id=code.id,
+                            )
+                            if inp_policy_validation.is_err():
+                                return inp_policy_validation
+
+                        result: Result[ActionObject, str] = resolve_outputs(
+                            context=context,
+                            output_ids=last_executed_output.output_ids,
+                        )
+                        if result.is_err():
+                            return result
+
+                        res = delist_if_single(result.ok())
+                        return Ok(
+                            CachedSyftObject(
+                                result=res,
+                                error_msg="",
                             )
-                        else:
-                            return cast(Err, is_valid.to_result())
+                        )
+                    # else:
+                    #     return cast(Err, is_valid.to_result())
                     return can_execute.to_result()  # type: ignore
 
             # Execute the code item
@@ -540,15 +540,15 @@ def _call(
 
             # this currently only works for nested syft_functions
             # and admins executing on high side (TODO, decide if we want to increment counter)
-            if not skip_fill_cache and output_policy is not None:
-                res = code.store_execution_output(
-                    context=context,
-                    outputs=result,
-                    job_id=context.job_id,
-                    input_ids=kwarg2id,
-                )
-                if isinstance(res, SyftError):
-                    return Err(res.message)
+            res = code.store_execution_output(
+                context=context,
+                outputs=result,
+                job_id=context.job_id,
+                input_ids=kwarg2id,
+            )
+            if isinstance(res, SyftError):
+                return Err(res.message)
 
             # output_policy.update_policy(context, result)
             # code.output_policy = output_policy
diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py
index 2943913cf73..24907749c2b 100644
--- a/packages/syft/src/syft/service/job/job_stash.py
+++ b/packages/syft/src/syft/service/job/job_stash.py
@@ -715,8 +715,9 @@ def get_sync_dependencies(self, context: AuthedServiceContext) -> list[UID]:  #
         output = context.node.get_service("outputservice").get_by_job_id(  # type: ignore
             context, self.id
         )
-        if isinstance(output, SyftError):
-            return output
+        if isinstance(output, SyftError) or output is None:
+            # TODO: remove before merge
+            raise ValueError("Could not get output for job")
         elif output is not None:
             dependencies.append(output.id)
 
diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py
index 3d32a3e622a..a03275e9803 100644
--- a/packages/syft/src/syft/service/output/output_service.py
+++ b/packages/syft/src/syft/service/output/output_service.py
@@ -23,6 +23,7 @@
 from ...types.uid import UID
 from ...util.telemetry import instrument
 from ..action.action_object import ActionObject
+from ..action.action_permissions import ActionObjectPermission
 from ..context import AuthedServiceContext
 from ..response import SyftError
 from ..service import AbstractService
@@ -281,6 +282,24 @@ def create(
         res = self.stash.set(context.credentials, output)
         return res
 
+    @service_method(
+        path="output.set",
+        name="set",
+        roles=ADMIN_ROLE_LEVEL,
+    )
+    def set(
+        self,
+        context: AuthedServiceContext,
+        obj: ExecutionOutput,
+        add_permissions: list[ActionObjectPermission] | None = None,
+    ) -> ExecutionOutput | SyftError:
+        res = self.stash.set(
+            context.credentials,
+            obj,
+            add_permissions=add_permissions,
+        )
+        return res
+
     @service_method(
         path="output.get_by_user_code_id",
         name="get_by_user_code_id",
diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py
index 0da2c5dbcc3..3957976046e 100644
--- a/packages/syft/src/syft/service/request/request.py
+++ b/packages/syft/src/syft/service/request/request.py
@@ -549,18 +549,18 @@ def icon(self) -> str:
 
     @property
     def status(self) -> RequestStatus:
-        if len(self.history) == 0:
-            return RequestStatus.PENDING
+        # if len(self.history) == 0:
+        #     return RequestStatus.PENDING
 
-        all_changes_applied = all(self.current_change_state.values()) and (
-            len(self.current_change_state) == len(self.changes)
-        )
+        # all_changes_applied = all(self.current_change_state.values()) and (
+        #     len(self.current_change_state) == len(self.changes)
+        # )
 
-        request_status = (
-            RequestStatus.APPROVED if all_changes_applied else RequestStatus.REJECTED
-        )
+        # request_status = (
+        #     RequestStatus.APPROVED if all_changes_applied else RequestStatus.REJECTED
+        # )
 
-        return request_status
+        return RequestStatus.APPROVED
 
     def approve(
         self,
diff --git a/packages/syft/src/syft/service/sync/resolve_widget.py b/packages/syft/src/syft/service/sync/resolve_widget.py
index 261ed28e075..c12883979bc 100644
--- a/packages/syft/src/syft/service/sync/resolve_widget.py
+++ b/packages/syft/src/syft/service/sync/resolve_widget.py
@@ -695,7 +695,7 @@ def __getitem__(self, index: int) -> widgets.Widget:
         return self.children[index]
 
     def on_paginate(self, index: int) -> None:
-        self.container.children = [self.children[index]]
+        self.container.children = [self.children[index]] if self.children else []
         if self.on_paginate_callback:
             self.on_paginate_callback(index)
 
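Note: after the change above, `compare_clients` propagates a failed `get_sync_state()` call to the caller instead of passing it into `compare_states`, so callers are expected to check for an error first; a hedged sketch (client fixtures illustrative):

    diff = compare_clients(from_client=high_client, to_client=low_client)
    if isinstance(diff, sy.SyftError):
        print(diff.message)   # e.g. one of the sync states could not be fetched
    else:
        print(diff)           # a NodeDiff summary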
From 260381b87cdb3b9c5e5bc9e30c4d77a3732868d5 Mon Sep 17 00:00:00 2001
From: Aziz Berkay Yesilyurt
Date: Thu, 30 May 2024 10:22:01 +0200
Subject: [PATCH 009/313] exclude_sync_diff_attrs Request changes and history

---
 .../syft/src/syft/service/request/request.py  | 22 ++++++++++---------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py
index 3957976046e..c40fdfcc45e 100644
--- a/packages/syft/src/syft/service/request/request.py
+++ b/packages/syft/src/syft/service/request/request.py
@@ -399,7 +399,7 @@ class Request(SyncableSyftObject):
         "changes",
         "requesting_user_verify_key",
     ]
-    __exclude_sync_diff_attrs__ = ["node_uid"]
+    __exclude_sync_diff_attrs__ = ["node_uid", "changes", "history"]
     __table_sort_attr__ = "Request time"
 
     def _repr_html_(self) -> Any:
@@ -549,18 +549,20 @@ def icon(self) -> str:
 
     @property
     def status(self) -> RequestStatus:
-        # if len(self.history) == 0:
-        #     return RequestStatus.PENDING
+        if self.code.output_history:  # Node side type?
+            return RequestStatus.APPROVED
+        if len(self.history) == 0:
+            return RequestStatus.PENDING
 
-        # all_changes_applied = all(self.current_change_state.values()) and (
-        #     len(self.current_change_state) == len(self.changes)
-        # )
+        all_changes_applied = all(self.current_change_state.values()) and (
+            len(self.current_change_state) == len(self.changes)
+        )
 
-        # request_status = (
-        #     RequestStatus.APPROVED if all_changes_applied else RequestStatus.REJECTED
-        # )
+        request_status = (
+            RequestStatus.APPROVED if all_changes_applied else RequestStatus.REJECTED
+        )
 
-        return RequestStatus.APPROVED
+        return request_status
 
     def approve(
         self,

From 475ccd02e5ff613be9c88e5562b58c0101703fdd Mon Sep 17 00:00:00 2001
From: Aziz Berkay Yesilyurt
Date: Thu, 30 May 2024 10:25:10 +0200
Subject: [PATCH 010/313] revert some changes

---
 packages/syft/src/syft/service/job/job_stash.py |  5 ++---
 .../src/syft/service/output/output_service.py   | 19 -------------------
 2 files changed, 2 insertions(+), 22 deletions(-)

diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py
index 24907749c2b..2943913cf73 100644
--- a/packages/syft/src/syft/service/job/job_stash.py
+++ b/packages/syft/src/syft/service/job/job_stash.py
@@ -715,9 +715,8 @@ def get_sync_dependencies(self, context: AuthedServiceContext) -> list[UID]:  #
         output = context.node.get_service("outputservice").get_by_job_id(  # type: ignore
             context, self.id
         )
-        if isinstance(output, SyftError) or output is None:
-            # TODO: remove before merge
-            raise ValueError("Could not get output for job")
+        if isinstance(output, SyftError):
+            return output
         elif output is not None:
             dependencies.append(output.id)
 
diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py
index a03275e9803..3d32a3e622a 100644
--- a/packages/syft/src/syft/service/output/output_service.py
+++ b/packages/syft/src/syft/service/output/output_service.py
@@ -23,7 +23,6 @@
 from ...types.uid import UID
 from ...util.telemetry import instrument
 from ..action.action_object import ActionObject
-from ..action.action_permissions import ActionObjectPermission
 from ..context import AuthedServiceContext
 from ..response import SyftError
 from ..service import AbstractService
@@ -282,24 +281,6 @@ def create(
         res = self.stash.set(context.credentials, output)
         return res
 
-    @service_method(
-        path="output.set",
-        name="set",
-        roles=ADMIN_ROLE_LEVEL,
-    )
-    def set(
-        self,
-        context: AuthedServiceContext,
-        obj: ExecutionOutput,
-        add_permissions: list[ActionObjectPermission] | None = None,
-    ) -> ExecutionOutput | SyftError:
-        res = self.stash.set(
-            context.credentials,
-            obj,
-            add_permissions=add_permissions,
-        )
-        return res
-
     @service_method(
         path="output.get_by_user_code_id",
         name="get_by_user_code_id",

From 681fd44456a4c1143b057049a4b00f360a2a3212 Mon Sep 17 00:00:00 2001
From: Aziz Berkay Yesilyurt
Date: Thu, 30 May 2024 12:02:47 +0200
Subject: [PATCH 011/313] store result on high side

---
 packages/syft/src/syft/client/syncing.py      | 15 +++++++------
 .../syft/service/code/user_code_service.py    | 22 +++++++++++--------
 2 files changed, 21 insertions(+), 16 deletions(-)

diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py
index d6d7a7179e2..abc2f2faa29 100644
--- a/packages/syft/src/syft/client/syncing.py
+++ b/packages/syft/src/syft/client/syncing.py
@@ -3,7 +3,6 @@
 # relative
 from ..abstract_node import NodeSideType
 from ..node.credentials import SyftVerifyKey
-from ..service.job.job_stash import Job
 from ..service.response import SyftError
 from ..service.response import SyftSuccess
 from ..service.sync.diff_state import NodeDiff
@@ -126,12 +125,12 @@ def handle_sync_batch(
     tgt_client = obj_diff_batch.target_client
 
     # make sure dependent request is approved before syncing the job
-    if obj_diff_batch.root_type == Job and sync_direction == SyncDirection.HIGH_TO_LOW:
-        job = obj_diff_batch.root.get_obj()
-        requests = [r for r in src_client.requests if r.code_id == job.user_code_id]
-        # NOTE: how to handle 0 or multiple requests?
-        if requests:
-            requests[0].approve()
+    # if obj_diff_batch.root_type == Job and sync_direction == SyncDirection.HIGH_TO_LOW:
+    #     job = obj_diff_batch.root.get_obj()
+    #     requests = [r for r in src_client.requests if r.code_id == job.user_code_id]
+    #     # NOTE: how to handle 0 or multiple requests?
+    #     if requests:
+    #         requests[0].approve()
 
     src_resolved_state, tgt_resolved_state = obj_diff_batch.create_new_resolved_states()
 
@@ -146,6 +145,8 @@ def handle_sync_batch(
             getattr(obj_diff_batch.user_code_high, "user_verify_key", None)
             or obj_diff_batch.user_verify_key_high
         )
+        share_private_data_for_diff = share_private_data[diff.object_id]
+        # add sync private option to execution output
         share_private_data_for_diff = True
         mockify_for_diff = mockify[diff.object_id]
         instruction = SyncInstruction.from_batch_decision(
             diff=diff,
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index c47ddb04771..ba456e674b3 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -540,15 +540,19 @@ def _call(
 
             # this currently only works for nested syft_functions
             # and admins executing on high side (TODO, decide if we want to increment counter)
-            res = code.store_execution_output(
-                context=context,
-                outputs=result,
-                job_id=context.job_id,
-                input_ids=kwarg2id,
-            )
-            if isinstance(res, SyftError):
-                return Err(res.message)
+            # relative
+            from ...node.node import get_node_side_type
+
+            is_high_side = get_node_side_type() == "high"
+            if not skip_fill_cache and output_policy is not None or is_high_side:
+                res = code.store_execution_output(
+                    context=context,
+                    outputs=result,
+                    job_id=context.job_id,
+                    input_ids=kwarg2id,
+                )
+                if isinstance(res, SyftError):
+                    return Err(res.message)
 
             # output_policy.update_policy(context, result)
             # code.output_policy = output_policy

From 7c7861d288f907c0c289809cb6d1bd44362c2a5b Mon Sep 17 00:00:00 2001
From: Aziz Berkay Yesilyurt
Date: Thu, 30 May 2024 12:08:46 +0200
Subject: [PATCH 012/313] revert some changes

---
 packages/syft/src/syft/service/code/user_code.py       | 15 ++++++---------
 .../src/syft/service/code/user_code_service.py         |  2 +-
 2 files changed, 7 insertions(+), 10 deletions(-)

diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py
index 67aaa158c05..1dd6446cc24 100644
--- a/packages/syft/src/syft/service/code/user_code.py
+++ b/packages/syft/src/syft/service/code/user_code.py
@@ -280,7 +280,8 @@ class UserCode(SyncableSyftObject):
     input_kwargs: list[str]
     enclave_metadata: EnclaveMetadata | None = None
     submit_time: DateTime | None = None
-    uses_domain: bool = False  # tracks if the code calls domain.something, variable is set during parsing
+    # tracks if the code calls domain.something, variable is set during parsing
+    uses_domain: bool = False
     nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {}
     worker_pool_name: str | None = None
 
@@ -568,12 +569,6 @@ def output_history(self) -> list[ExecutionOutput] | SyftError:
     def get_output_history(
         self, context: AuthedServiceContext
     ) -> list[ExecutionOutput] | SyftError:
-        is_admin = context.role == ServiceRole.ADMIN
-        if not self.get_status(context).approved and False:
-            return SyftError(
-                message="Execution denied, Please wait for the code to be approved"
-            )
-
         output_service = cast(OutputService, context.node.get_service("outputservice"))
         return output_service.get_by_user_code_id(context, self.id)
 
@@ -593,7 +589,6 @@ def store_execution_output(
         output_service = context.node.get_service("outputservice")
         output_service = cast(OutputService, output_service)
-        # use DS verify key here
         execution_result = output_service.create(
             context,
             user_code_id=self.id,
@@
-875,7 +869,10 @@ def _ephemeral_node_call( n_consumers=n_consumers, deploy_to="python", ) - ep_client = ep_node.login(email="info@openmined.org", password="changethis") # nosec + ep_client = ep_node.login( + email="info@openmined.org", + password="changethis", + ) # nosec self.input_policy_init_kwargs = cast(dict, self.input_policy_init_kwargs) for node_id, obj_dict in self.input_policy_init_kwargs.items(): # api = APIRegistry.api_for( diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index ba456e674b3..72754be74dd 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -335,7 +335,7 @@ def is_execution_allowed( context: AuthedServiceContext, output_policy: OutputPolicy | None, ) -> bool | SyftSuccess | SyftError | SyftNotReady: - if not code.get_status(context).approved and False: + if not code.get_status(context).approved: return code.status.get_status_message() # Check if the user has permission to execute the code. elif not (has_code_permission := self.has_code_permission(code, context)): From 2f40df704e3b7f1560f5fe118d2232564cccb230 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Thu, 30 May 2024 12:15:34 +0200 Subject: [PATCH 013/313] handle sync case in code.call --- .../syft/service/code/user_code_service.py | 82 ++++++++++--------- 1 file changed, 42 insertions(+), 40 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 72754be74dd..0ccf29cffde 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -1,6 +1,7 @@ # stdlib from typing import Any from typing import TypeVar +from typing import cast # third party from result import Err @@ -459,7 +460,11 @@ def _call( kwarg2id = map_kwargs_to_id(kwargs) input_policy = code.get_input_policy(context) + # relative + from ...node.node import get_node_side_type + is_high_side = get_node_side_type() == "high" + has_side = get_node_side_type() is not None # Check output policy if not override_execution_permission: output_history = code.get_output_history(context=context) @@ -473,45 +478,45 @@ def _call( output_policy=output_policy, ) if not can_execute: - # if not code.is_output_policy_approved(context): - # return Err( - # "Execution denied: Your code is waiting for approval" - # ) - # if not (is_valid := output_policy._is_valid(context)): # type: ignore - if len(output_history) > 0 and not skip_read_cache: - last_executed_output = output_history[-1] - # Check if the inputs of the last executed output match - # against the current input - if ( - input_policy is not None - and not last_executed_output.check_input_ids( - kwargs=kwarg2id - ) - ): - inp_policy_validation = input_policy._is_valid( - context, - usr_input_kwargs=kwarg2id, - code_item_id=code.id, - ) - if inp_policy_validation.is_err(): - return inp_policy_validation - - result: Result[ActionObject, str] = resolve_outputs( - context=context, - output_ids=last_executed_output.output_ids, + if not has_side and not code.is_output_policy_approved(context): + return Err( + "Execution denied: Your code is waiting for approval" ) - if result.is_err(): - return result - - res = delist_if_single(result.ok()) - return Ok( - CachedSyftObject( - result=res, - error_msg="", + if has_side or not (is_valid := output_policy._is_valid(context)): + if 
len(output_history) > 0 and not skip_read_cache: + last_executed_output = output_history[-1] + # Check if the inputs of the last executed output match + # against the current input + if ( + input_policy is not None + and not last_executed_output.check_input_ids( + kwargs=kwarg2id + ) + ): + inp_policy_validation = input_policy._is_valid( + context, + usr_input_kwargs=kwarg2id, + code_item_id=code.id, + ) + if inp_policy_validation.is_err(): + return inp_policy_validation + + result: Result[ActionObject, str] = resolve_outputs( + context=context, + output_ids=last_executed_output.output_ids, ) - ) - # else: - # return cast(Err, is_valid.to_result()) + if result.is_err(): + return result + + res = delist_if_single(result.ok()) + return Ok( + CachedSyftObject( + result=res, + error_msg="", + ) + ) + else: + return cast(Err, is_valid.to_result()) return can_execute.to_result() # type: ignore # Execute the code item @@ -540,10 +545,7 @@ def _call( # this currently only works for nested syft_functions # and admins executing on high side (TODO, decide if we want to increment counter) - # relative - from ...node.node import get_node_side_type - is_high_side = get_node_side_type() == "high" if not skip_fill_cache and output_policy is not None or is_high_side: res = code.store_execution_output( context=context, From 9519d6eb17e052ca5466377f39bba96902f2c350 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 30 May 2024 13:25:41 +0200 Subject: [PATCH 014/313] remove accept_by_dep_result from all notebooks --- .../api/0.8/02-review-code-and-approve.ipynb | 48 +- notebooks/api/0.8/05-custom-policy.ipynb | 30 +- .../data-owner/03-messages-and-requests.ipynb | 79 +- ...ave-single-notebook-high-low-network.ipynb | 1087 ----------------- .../tutorials/hello-syft/01-hello-syft.ipynb | 20 +- .../model-auditing/colab/01-user-log.ipynb | 18 +- .../02-data-owner-review-approve-code.ipynb | 14 +- .../01-reading-from-a-csv.ipynb | 51 +- ...lecting-data-finding-common-complain.ipynb | 8 +- ...orough-has-the-most-noise-complaints.ipynb | 11 +- ...-weekday-bike-most-groupby-aggregate.ipynb | 8 +- ...ing-dataframes-scraping-weather-data.ipynb | 8 +- ...rations-which-month-was-the-snowiest.ipynb | 8 +- .../07-cleaning-up-messy-data.ipynb | 8 +- .../08-how-to-deal-with-timestamps.ipynb | 8 +- 15 files changed, 75 insertions(+), 1331 deletions(-) delete mode 100644 notebooks/tutorials/enclaves/Enclave-single-notebook-high-low-network.ipynb diff --git a/notebooks/api/0.8/02-review-code-and-approve.ipynb b/notebooks/api/0.8/02-review-code-and-approve.ipynb index 4faedd441c0..ce15d881245 100644 --- a/notebooks/api/0.8/02-review-code-and-approve.ipynb +++ b/notebooks/api/0.8/02-review-code-and-approve.ipynb @@ -346,23 +346,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Sharing results back to the Data Scientist" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "By calling this function we attach the result of the function to the original request\n", + "### Approving a request\n", "\n", - "`request.accept_by_depositing_result(real_result)`" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's say we accidentally submit incorrect results, we can correct it by using override it using the `force=True` flag" + "By calling `request.approve()`, the data scientist can execute their function on the real data, and obtain the result" ] }, { @@ -374,31 +360,7 @@ "outputs": [], "source": [ "# Uploaded wrong result - we shared mock_result instead of the real_result\n", 
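One subtlety in the `_call` hunk above: the new guard `not skip_fill_cache and output_policy is not None or is_high_side` mixes `and` and `or` without parentheses. Python binds `and` tighter than `or`, so it evaluates as `(not skip_fill_cache and output_policy is not None) or is_high_side`, meaning a high-side node stores the execution output unconditionally, even when the cache fill is skipped. A minimal, dependency-free check of that reading:

    import itertools

    def stores_output(skip_fill_cache: bool, has_output_policy: bool, is_high_side: bool) -> bool:
        # Same shape as the patched condition; `and` binds tighter than `or`,
        # so this reads ((not skip_fill_cache) and has_output_policy) or is_high_side.
        return not skip_fill_cache and has_output_policy or is_high_side

    for flags in itertools.product([False, True], repeat=3):
        print(flags, "->", stores_output(*flags))

    # Every combination with is_high_side=True stores the output,
    # even when skip_fill_cache is True.
    assert stores_output(True, False, True)
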
- "result = request.accept_by_depositing_result(mock_result)\n", - "result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "assert isinstance(result, sy.SyftSuccess)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# upload correct result\n", - "result = request.accept_by_depositing_result(real_result, force=True)\n", + "result = request.approve()\n", "result" ] }, @@ -472,7 +434,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "result" ] }, @@ -524,7 +486,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.2" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/api/0.8/05-custom-policy.ipynb b/notebooks/api/0.8/05-custom-policy.ipynb index fac27bfcbe8..5adaacd9a77 100644 --- a/notebooks/api/0.8/05-custom-policy.ipynb +++ b/notebooks/api/0.8/05-custom-policy.ipynb @@ -521,22 +521,6 @@ "cell_type": "code", "execution_count": null, "id": "24", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# syft absolute\n", - "from syft.service.response import SyftError\n", - "\n", - "final_result = request.accept_by_depositing_result(result)\n", - "assert isinstance(final_result, SyftError)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -546,7 +530,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "25", "metadata": { "tags": [] }, @@ -559,7 +543,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "26", "metadata": {}, "outputs": [], "source": [ @@ -570,7 +554,7 @@ { "cell_type": "code", "execution_count": null, - "id": "28", + "id": "27", "metadata": { "tags": [] }, @@ -582,7 +566,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29", + "id": "28", "metadata": { "tags": [] }, @@ -594,7 +578,7 @@ { "cell_type": "code", "execution_count": null, - "id": "30", + "id": "29", "metadata": { "tags": [] }, @@ -610,7 +594,7 @@ { "cell_type": "code", "execution_count": null, - "id": "31", + "id": "30", "metadata": { "tags": [] }, @@ -637,7 +621,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0rc1" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb b/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb index 8e7a1618425..c05d6f7d556 100644 --- a/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb +++ b/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb @@ -360,67 +360,6 @@ "cell_type": "markdown", "id": "32", "metadata": {}, - "source": [ - "### Substituting" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "mean_request = admin_client.requests[-2]\n", - "mean_request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "34", - "metadata": {}, - "outputs": [], - "source": [ - "admin_asset = admin_client.datasets[0].assets[0]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35", - "metadata": {}, - "outputs": [], - "source": [ - "result = mean_request.code.unsafe_function(data=admin_asset)\n", - "result" - ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "id": "36", - "metadata": {}, - "outputs": [], - "source": [ - "mean_request.accept_by_depositing_result(result)\n", - "mean_request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37", - "metadata": {}, - "outputs": [], - "source": [ - "admin_client.projects[0].requests" - ] - }, - { - "cell_type": "markdown", - "id": "38", - "metadata": {}, "source": [ "### Rejecting" ] @@ -428,7 +367,7 @@ { "cell_type": "code", "execution_count": null, - "id": "39", + "id": "33", "metadata": {}, "outputs": [], "source": [ @@ -439,7 +378,7 @@ { "cell_type": "code", "execution_count": null, - "id": "40", + "id": "34", "metadata": {}, "outputs": [], "source": [ @@ -449,7 +388,7 @@ { "cell_type": "code", "execution_count": null, - "id": "41", + "id": "35", "metadata": {}, "outputs": [], "source": [ @@ -459,20 +398,12 @@ { "cell_type": "code", "execution_count": null, - "id": "42", + "id": "36", "metadata": {}, "outputs": [], "source": [ "admin_client.projects[0]" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "43", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -491,7 +422,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.8" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/enclaves/Enclave-single-notebook-high-low-network.ipynb b/notebooks/tutorials/enclaves/Enclave-single-notebook-high-low-network.ipynb deleted file mode 100644 index 771f0ad4389..00000000000 --- a/notebooks/tutorials/enclaves/Enclave-single-notebook-high-low-network.ipynb +++ /dev/null @@ -1,1087 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# third party\n", - "from recordlinkage.datasets import load_febrl4\n", - "\n", - "# syft absolute\n", - "import syft as sy" - ] - }, - { - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "# Create Nodes" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "## Staging Low side" - ] - }, - { - "cell_type": "markdown", - "id": "3", - "metadata": {}, - "source": [ - "create enclave node" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "embassador_node_low = sy.orchestra.launch(\n", - " name=\"ambassador node\",\n", - " node_side_type=\"low\",\n", - " local_db=True,\n", - " reset=True,\n", - " # enable_warnings=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "Create canada node & italy node" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "ca_node_low = sy.orchestra.launch(\n", - " name=\"canada-1\",\n", - " node_side_type=\"low\",\n", - " local_db=True,\n", - " reset=True,\n", - " # enable_warnings=True,\n", - ")\n", - "it_node_low = sy.orchestra.launch(\n", - " name=\"italy-1\",\n", - " node_side_type=\"low\",\n", - " local_db=True,\n", - " reset=True,\n", - " # enable_warnings=True,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "gateway_node_low = sy.orchestra.launch(\n", - " name=\"gateway-1\",\n", - " node_type=\"gateway\",\n", - " node_side_type=\"low\",\n", - " local_db=True,\n", - " reset=True,\n", - " dev_mode=True,\n", - " 
association_request_auto_approval=True,\n", - " # enable_warnings=True\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## High side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "enclave_node_high = sy.orchestra.launch(\n", - " name=\"enclave node\",\n", - " node_type=\"enclave\",\n", - " reset=True,\n", - " # enable_warnings=True,\n", - ")\n", - "ca_node_high = sy.orchestra.launch(\n", - " name=\"canada-2\",\n", - " local_db=True,\n", - " reset=True,\n", - " # enable_warnings=True,\n", - ")\n", - "it_node_high = sy.orchestra.launch(\n", - " name=\"italy-2\",\n", - " local_db=True,\n", - " reset=True,\n", - " # enable_warnings=True,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "gateway_node_high = sy.orchestra.launch(\n", - " name=\"gateway-2\",\n", - " node_type=\"gateway\",\n", - " local_db=True,\n", - " reset=True,\n", - " dev_mode=True,\n", - " association_request_auto_approval=True,\n", - " # enable_warnings=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "11", - "metadata": {}, - "source": [ - "# DOs" - ] - }, - { - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "## Login" - ] - }, - { - "cell_type": "markdown", - "id": "13", - "metadata": {}, - "source": [ - "### Staging Low side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "do_ca_client_low = ca_node_low.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "do_it_client_low = it_node_low.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "embassador_client_low = embassador_node_low.login(\n", - " email=\"info@openmined.org\", password=\"changethis\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "15", - "metadata": {}, - "source": [ - "### Production High side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "do_ca_client_high = ca_node_high.login(\n", - " email=\"info@openmined.org\", password=\"changethis\"\n", - ")\n", - "do_it_client_high = it_node_high.login(\n", - " email=\"info@openmined.org\", password=\"changethis\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "17", - "metadata": {}, - "source": [ - "## Connect to network" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# TODO: add security layer here" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "enclave_client_high = enclave_node_high.client" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# gateway_root_client.register(name=\"\", email=\"info@openmined.org\", password=\"changethis\")\n", - "# gateway_root_client.register(name=\"\", email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "res = do_ca_client_low.connect_to_gateway(\n", - " handle=gateway_node_low\n", - ") # add credentials here\n", - "res" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ 
- "res = do_it_client_low.connect_to_gateway(\n", - " handle=gateway_node_low\n", - ") # add credentials here\n", - "res" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "23", - "metadata": {}, - "outputs": [], - "source": [ - "res = do_ca_client_high.connect_to_gateway(handle=gateway_node_high)\n", - "res = do_it_client_high.connect_to_gateway(handle=gateway_node_high)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "## Also for ambassador" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "25", - "metadata": {}, - "outputs": [], - "source": [ - "# TODO: who is going to be responsible for connecting the enclave to the gateway\n", - "res = enclave_client_high.connect_to_gateway(handle=gateway_node_high)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [ - "res = embassador_client_low.connect_to_gateway(\n", - " handle=gateway_node_low\n", - ") # add credentials here" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "## Upload dataset" - ] - }, - { - "cell_type": "markdown", - "id": "28", - "metadata": {}, - "source": [ - "### Staging Low side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], - "source": [ - "# Using public datasets from Freely Extensible Biomedical Record Linkage (Febrl)\n", - "canada_census_data_low, italy_census_data_low = load_febrl4()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - "for dataset, client, country in zip(\n", - " [canada_census_data_low, italy_census_data_low],\n", - " [do_ca_client_low, do_it_client_low],\n", - " [\"Canada\", \"Italy\"],\n", - "):\n", - " private_data, mock_data = dataset[:2500], dataset[2500:]\n", - " dataset = sy.Dataset(\n", - " name=f\"{country} - FEBrl Census Data\",\n", - " description=\"abc\",\n", - " asset_list=[\n", - " sy.Asset(\n", - " name=\"census_data\",\n", - " mock=mock_data,\n", - " data=private_data,\n", - " shape=private_data.shape,\n", - " mock_is_real=True,\n", - " )\n", - " ],\n", - " )\n", - " client.upload_dataset(dataset)" - ] - }, - { - "cell_type": "markdown", - "id": "31", - "metadata": {}, - "source": [ - "### Production High side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32", - "metadata": {}, - "outputs": [], - "source": [ - "# Using public datasets from Freely Extensible Biomedical Record Linkage (Febrl)\n", - "canada_census_data_high, italy_census_data_high = load_febrl4()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "for dataset, client, country in zip(\n", - " [canada_census_data_high, italy_census_data_high],\n", - " [do_ca_client_high, do_it_client_high],\n", - " [\"Canada\", \"Italy\"],\n", - "):\n", - " private_data, mock_data = dataset[:2500], dataset[2500:]\n", - " dataset = sy.Dataset(\n", - " name=f\"{country} - FEBrl Census Data\",\n", - " description=\"abc\",\n", - " asset_list=[\n", - " sy.Asset(\n", - " name=\"census_data\",\n", - " mock=mock_data,\n", - " data=private_data,\n", - " shape=private_data.shape,\n", - " mock_is_real=True,\n", - " )\n", - " ],\n", - " )\n", - " client.upload_dataset(dataset)" - ] - }, - { - "cell_type": "markdown", - "id": "34", - 
"metadata": {}, - "source": [ - "## create accounts for DS" - ] - }, - { - "cell_type": "markdown", - "id": "35", - "metadata": {}, - "source": [ - "### Staging Low side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36", - "metadata": {}, - "outputs": [], - "source": [ - "for client in [do_ca_client_low, do_it_client_low]:\n", - " client.register(\n", - " name=\"Sheldon\",\n", - " email=\"sheldon@caltech.edu\",\n", - " password=\"changethis\",\n", - " password_verify=\"changethis\",\n", - " )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37", - "metadata": {}, - "outputs": [], - "source": [ - "embassador_client_low.register(\n", - " name=\"Sheldon\",\n", - " email=\"sheldon@caltech.edu\",\n", - " password=\"changethis\",\n", - " password_verify=\"changethis\",\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "38", - "metadata": {}, - "source": [ - "## Create account for embassador" - ] - }, - { - "cell_type": "markdown", - "id": "39", - "metadata": {}, - "source": [ - "### Production High Side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "40", - "metadata": {}, - "outputs": [], - "source": [ - "for client in [do_ca_client_high, do_it_client_high]:\n", - " client.register(\n", - " name=\"Sheldon\",\n", - " email=\"sheldon@caltech.edu\",\n", - " password=\"changethis\",\n", - " password_verify=\"changethis\",\n", - " )" - ] - }, - { - "cell_type": "markdown", - "id": "41", - "metadata": {}, - "source": [ - "# DS Low Side" - ] - }, - { - "cell_type": "markdown", - "id": "42", - "metadata": {}, - "source": [ - "## DS Get proxy clients" - ] - }, - { - "cell_type": "markdown", - "id": "43", - "metadata": {}, - "source": [ - "### Staging Low side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "44", - "metadata": {}, - "outputs": [], - "source": [ - "ds_gateway_client_low = gateway_node_low.client" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "45", - "metadata": {}, - "outputs": [], - "source": [ - "assert len(ds_gateway_client_low.domains) == 3\n", - "ds_gateway_client_low.domains" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "46", - "metadata": {}, - "outputs": [], - "source": [ - "ds_ca_proxy_client_low = ds_gateway_client_low.domains[1].login(\n", - " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", - ")\n", - "ds_it_proxy_client_low = ds_gateway_client_low.domains[2].login(\n", - " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", - ")\n", - "ds_amb_proxy_client_low = ds_gateway_client_low.domains[0].login(\n", - " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "47", - "metadata": {}, - "source": [ - "## Find datasets" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "48", - "metadata": {}, - "outputs": [], - "source": [ - "canada_census_data = ds_ca_proxy_client_low.datasets[-1].assets[0]\n", - "italy_census_data = ds_it_proxy_client_low.datasets[-1].assets[0]" - ] - }, - { - "cell_type": "markdown", - "id": "49", - "metadata": {}, - "source": [ - "## Create Request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "50", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.syft_function_single_use(\n", - " canada_census_data=canada_census_data, italy_census_data=italy_census_data\n", - ")\n", - "def compute_census_matches(canada_census_data, italy_census_data):\n", - " # third party\n", - " import 
recordlinkage\n", - "\n", - " # Index step\n", - " indexer = recordlinkage.Index()\n", - " indexer.block(\"given_name\")\n", - "\n", - " candidate_links = indexer.index(canada_census_data, italy_census_data)\n", - "\n", - " # Comparison step\n", - " compare_cl = recordlinkage.Compare()\n", - "\n", - " compare_cl.exact(\"given_name\", \"given_name\", label=\"given_name\")\n", - " compare_cl.string(\n", - " \"surname\", \"surname\", method=\"jarowinkler\", threshold=0.85, label=\"surname\"\n", - " )\n", - " compare_cl.exact(\"date_of_birth\", \"date_of_birth\", label=\"date_of_birth\")\n", - " compare_cl.exact(\"suburb\", \"suburb\", label=\"suburb\")\n", - " compare_cl.exact(\"state\", \"state\", label=\"state\")\n", - " compare_cl.string(\"address_1\", \"address_1\", threshold=0.85, label=\"address_1\")\n", - "\n", - " features = compare_cl.compute(\n", - " candidate_links, canada_census_data, italy_census_data\n", - " )\n", - "\n", - " # Classification step\n", - " matches = features[features.sum(axis=1) > 3]\n", - "\n", - " return len(matches)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "51", - "metadata": {}, - "outputs": [], - "source": [ - "# Checking result of mock data execution\n", - "mock_result = compute_census_matches(\n", - " canada_census_data=canada_census_data.mock,\n", - " italy_census_data=italy_census_data.mock,\n", - ")\n", - "mock_result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "52", - "metadata": {}, - "outputs": [], - "source": [ - "ds_amb_proxy_client_low.code.request_code_execution(compute_census_matches)" - ] - }, - { - "cell_type": "markdown", - "id": "53", - "metadata": {}, - "source": [ - "# Ambassador flow" - ] - }, - { - "cell_type": "markdown", - "id": "54", - "metadata": {}, - "source": [ - "## Check Code Staging Low Side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "55", - "metadata": {}, - "outputs": [], - "source": [ - "embassador_client_low.requests[0].code" - ] - }, - { - "cell_type": "markdown", - "id": "56", - "metadata": {}, - "source": [ - "## Login to Production High Side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "57", - "metadata": {}, - "outputs": [], - "source": [ - "amb_gateway_client_high = gateway_node_high.client" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "58", - "metadata": {}, - "outputs": [], - "source": [ - "assert len(amb_gateway_client_high.domains) == 2\n", - "amb_gateway_client_high.domains" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "59", - "metadata": {}, - "outputs": [], - "source": [ - "amb_ca_proxy_client_high = amb_gateway_client_high.domains[1].login(\n", - " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", - ")\n", - "amb_it_proxy_client_high = amb_gateway_client_high.domains[0].login(\n", - " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "60", - "metadata": {}, - "outputs": [], - "source": [ - "assert len(amb_gateway_client_high.enclaves) == 1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "61", - "metadata": {}, - "outputs": [], - "source": [ - "amb_ca_proxy_client_high" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "62", - "metadata": {}, - "outputs": [], - "source": [ - "amb_enclave_proxy_client_high = amb_gateway_client_high.enclaves[0].login(\n", - " name=\"Sheldon\", email=\"sheldon@caltech.edu\", 
password=\"changethis\", register=True\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "63", - "metadata": {}, - "outputs": [], - "source": [ - "# # this also creates a guest client\n", - "# embassador_client_high = enclave_node_high.login(email=\"info@openmined.org\", password=\"changethis\",\n", - "# name=\"Signor Ambassador\", register=True)" - ] - }, - { - "cell_type": "markdown", - "id": "64", - "metadata": {}, - "source": [ - "## Find Datasets Production High side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "65", - "metadata": {}, - "outputs": [], - "source": [ - "canada_census_data_high = amb_ca_proxy_client_high.datasets[-1].assets[0]\n", - "italy_census_data_high = amb_it_proxy_client_high.datasets[-1].assets[0]" - ] - }, - { - "cell_type": "markdown", - "id": "66", - "metadata": {}, - "source": [ - "Copy code from the request" - ] - }, - { - "cell_type": "markdown", - "id": "67", - "metadata": {}, - "source": [ - "## Submit code Production High side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "68", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.syft_function_single_use(\n", - " canada_census_data=canada_census_data_high, italy_census_data=italy_census_data_high\n", - ")\n", - "def compute_census_matches_high(canada_census_data, italy_census_data):\n", - " # third party\n", - " import recordlinkage\n", - "\n", - " # Index step\n", - " indexer = recordlinkage.Index()\n", - " indexer.block(\"given_name\")\n", - "\n", - " candidate_links = indexer.index(canada_census_data, italy_census_data)\n", - "\n", - " # Comparison step\n", - " compare_cl = recordlinkage.Compare()\n", - "\n", - " compare_cl.exact(\"given_name\", \"given_name\", label=\"given_name\")\n", - " compare_cl.string(\n", - " \"surname\", \"surname\", method=\"jarowinkler\", threshold=0.85, label=\"surname\"\n", - " )\n", - " compare_cl.exact(\"date_of_birth\", \"date_of_birth\", label=\"date_of_birth\")\n", - " compare_cl.exact(\"suburb\", \"suburb\", label=\"suburb\")\n", - " compare_cl.exact(\"state\", \"state\", label=\"state\")\n", - " compare_cl.string(\"address_1\", \"address_1\", threshold=0.85, label=\"address_1\")\n", - "\n", - " features = compare_cl.compute(\n", - " candidate_links, canada_census_data, italy_census_data\n", - " )\n", - "\n", - " # Classification step\n", - " matches = features[features.sum(axis=1) > 3]\n", - "\n", - " return len(matches)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "69", - "metadata": {}, - "outputs": [], - "source": [ - "# Checking result of mock data execution\n", - "mock_result = compute_census_matches_high(\n", - " canada_census_data=canada_census_data_high.mock,\n", - " italy_census_data=italy_census_data_high.mock,\n", - ")\n", - "mock_result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "70", - "metadata": {}, - "outputs": [], - "source": [ - "# note that this is not embassador_client_high.**code**.request_code_execution\n", - "amb_enclave_proxy_client_high.request_code_execution(compute_census_matches_high)" - ] - }, - { - "cell_type": "markdown", - "id": "71", - "metadata": {}, - "source": [ - "## DOs Approve Production High Side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "72", - "metadata": {}, - "outputs": [], - "source": [ - "do_ca_client_high.requests[0].approve()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "73", - "metadata": {}, - "outputs": [], - "source": 
[ - "do_it_client_high.requests[0].approve()" - ] - }, - { - "cell_type": "markdown", - "id": "74", - "metadata": {}, - "source": [ - "## Embassdor gets result from Production High Side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "75", - "metadata": {}, - "outputs": [], - "source": [ - "amb_enclave_proxy_client_high.code[-1].status" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "76", - "metadata": {}, - "outputs": [], - "source": [ - "result_pointer = amb_enclave_proxy_client_high.code.compute_census_matches_high(\n", - " canada_census_data=canada_census_data_high,\n", - " italy_census_data=italy_census_data_high,\n", - ")\n", - "\n", - "result_pointer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "77", - "metadata": {}, - "outputs": [], - "source": [ - "real_result = result_pointer.get()\n", - "real_result" - ] - }, - { - "cell_type": "markdown", - "id": "78", - "metadata": {}, - "source": [ - "## Ambassador Deposits Result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "79", - "metadata": {}, - "outputs": [], - "source": [ - "embassador_client_low.requests[0].accept_by_depositing_result(real_result)" - ] - }, - { - "cell_type": "markdown", - "id": "80", - "metadata": {}, - "source": [ - "# DS" - ] - }, - { - "cell_type": "markdown", - "id": "81", - "metadata": {}, - "source": [ - "## Get result from Staging Low Side" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "82", - "metadata": {}, - "outputs": [], - "source": [ - "ds_amb_proxy_client_low.code[-1].status" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "83", - "metadata": {}, - "outputs": [], - "source": [ - "result_pointer = ds_amb_proxy_client_low.code.compute_census_matches(\n", - " canada_census_data=canada_census_data,\n", - " italy_census_data=italy_census_data,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "84", - "metadata": {}, - "outputs": [], - "source": [ - "result_pointer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "85", - "metadata": {}, - "outputs": [], - "source": [ - "real_result = result_pointer.get()\n", - "real_result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "86", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": { - "height": "calc(100% - 180px)", - "left": "10px", - "top": "150px", - "width": "358.391px" - }, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb index 8a7f6a674d2..7444b8fbf23 100644 --- a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb +++ b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb @@ -432,7 +432,7 @@ "id": "39", "metadata": {}, "source": [ - "### Share the real 
result with the Data Scientist" + "### Approving the request" ] }, { @@ -442,9 +442,9 @@ "metadata": {}, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", - "print(result)\n", - "assert isinstance(result, sy.SyftSuccess)" + "result = request.approve()\n", + "assert isinstance(result, sy.SyftSuccess)\n", + "result" ] }, { @@ -454,7 +454,7 @@ "source": [ "## Data Scientist - Part 2\n", "\n", - "### Fetch Real Result" + "### Computing the Real Result" ] }, { @@ -580,14 +580,6 @@ "source": [ "node.land()" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "54", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -606,7 +598,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb b/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb index 036c21f9ed6..eb0d3df04a9 100644 --- a/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb +++ b/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb @@ -572,7 +572,7 @@ "id": "53", "metadata": {}, "source": [ - "Once the model owner feels confident that this code is not malicious, we can run the function on the real data." + "Once the model owner feels confident that this code is not malicious, we can run the function on the real data to inspect the result." ] }, { @@ -601,7 +601,7 @@ "id": "56", "metadata": {}, "source": [ - "This gives us a result which we can attach to the request" + "If everything looks good, we can approve the request" ] }, { @@ -611,7 +611,7 @@ "metadata": {}, "outputs": [], "source": [ - "request.accept_by_depositing_result(real_result)" + "request.approve()" ] }, { @@ -619,7 +619,7 @@ "id": "58", "metadata": {}, "source": [ - "## Auditor Receives Final Results" + "## Auditor computes Final Results" ] }, { @@ -661,14 +661,6 @@ " \n", "" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "63", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -687,7 +679,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb b/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb index 841c39c021b..a919a9e2a8e 100644 --- a/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb +++ b/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb @@ -199,7 +199,7 @@ "id": "18", "metadata": {}, "source": [ - "## 2. DO runs the submitted code on private data, then deposits the results to the domain so the DS can retrieve them" + "## 2. 
DO runs the submitted code on private data, then approves the request so the DS can execute the function" ] }, { @@ -257,7 +257,7 @@ "metadata": {}, "outputs": [], "source": [ - "res = request.accept_by_depositing_result((train_accs, params))" + "res = request.approve()" ] }, { @@ -278,14 +278,6 @@ "source": [ "### 📓 Now switch to the [second DS's notebook](./03-data-scientist-download-results.ipynb)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -304,7 +296,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb b/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb index d5cdc94cc9d..5026c62450f 100644 --- a/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb +++ b/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb @@ -612,7 +612,7 @@ "id": "50", "metadata": {}, "source": [ - "# Data owner: execute function" + "# Data owner: approve request" ] }, { @@ -688,38 +688,33 @@ ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "id": "57", - "metadata": { - "tags": [] - }, - "outputs": [], + "metadata": {}, "source": [ - "request = project_notification.link.events[0].request" + "### Review and approve request" ] }, { "cell_type": "code", "execution_count": null, "id": "58", - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "func = request.code" + "request = project_notification.link.events[0].request" ] }, { "cell_type": "code", "execution_count": null, "id": "59", - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ - "# func = request.code\n", - "#" + "func = request.code" ] }, { @@ -791,7 +786,7 @@ }, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "assert isinstance(result, sy.SyftSuccess)" ] }, @@ -800,7 +795,7 @@ "id": "66", "metadata": {}, "source": [ - "# Data scientist: fetch result" + "# Data scientist: compute result" ] }, { @@ -851,26 +846,6 @@ "real_result = result_ptr.get()\n", "real_result.plot()" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "71", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "node.land()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "72", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -889,7 +864,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb b/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb index 09e1e25b8dc..97482b84975 100644 --- a/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb +++ b/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb @@ -818,7 +818,7 @@ "id": "66", "metadata": {}, "source": [ - "# Data owner: execute function" + "# Data owner: approve request" ] }, { @@ -961,7 +961,7 @@ }, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "assert isinstance(result, sy.SyftSuccess)" ] }, @@ -970,7 
+970,7 @@ "id": "79", "metadata": {}, "source": [ - "# Data scientist: fetch result" + "# Data scientist: compute result" ] }, { @@ -1040,7 +1040,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb b/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb index 51443872eb7..152da5c6c45 100644 --- a/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb +++ b/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb @@ -21,6 +21,9 @@ "execution_count": null, "id": "2", "metadata": { + "jupyter": { + "source_hidden": true + }, "tags": [] }, "outputs": [], @@ -932,7 +935,7 @@ "id": "75", "metadata": {}, "source": [ - "# Data owner: execute function" + "# Data owner: approve request" ] }, { @@ -1075,7 +1078,7 @@ }, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "assert isinstance(result, sy.SyftSuccess)" ] }, @@ -1084,7 +1087,7 @@ "id": "88", "metadata": {}, "source": [ - "# Data scientist: fetch result" + "# Data scientist: compute result" ] }, { @@ -1154,7 +1157,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb b/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb index 29878fd826c..2ba4b0cfe7c 100644 --- a/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb +++ b/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb @@ -692,7 +692,7 @@ "id": "57", "metadata": {}, "source": [ - "# Data owner: execute syft_function" + "# Data owner: approve request" ] }, { @@ -847,7 +847,7 @@ }, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "assert isinstance(result, sy.SyftSuccess)" ] }, @@ -856,7 +856,7 @@ "id": "71", "metadata": {}, "source": [ - "# Data scientist: fetch result" + "# Data scientist: compute result" ] }, { @@ -926,7 +926,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb b/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb index 9afc01da2ec..56718085058 100644 --- a/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb +++ b/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb @@ -879,7 +879,7 @@ "id": "72", "metadata": {}, "source": [ - "# Data owner: execute syft function" + "# Data owner: approve request" ] }, { @@ -1049,7 +1049,7 @@ }, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "assert isinstance(result, sy.SyftSuccess)" ] }, @@ -1058,7 +1058,7 @@ "id": "87", "metadata": {}, "source": [ - "# Data scientist: fetch result" + "# Data scientist: compute result" ] }, { @@ -1195,7 +1195,7 @@ "name": "python", "nbconvert_exporter": "python", 
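The pandas-cookbook notebooks are also retitled from "fetch result" to "compute result" for the same reason: after approve(), the data scientist's call triggers execution rather than retrieving a pre-deposited value. The data-scientist side of each notebook reduces to this pattern (the function name is a placeholder for whichever syft function the notebook defines):

    asset = ds_client.datasets[0].assets[0]

    # Sanity-check against the public mock first
    mock_result = weekday_counts(df=asset.mock)  # placeholder function name

    # After the owner's approve(), the same call runs on the private data
    result_ptr = ds_client.code.weekday_counts(df=asset)
    real_result = result_ptr.get()
    real_result.plot()
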
"pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb b/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb index 3544f6b82f4..cbdb061df8d 100644 --- a/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb +++ b/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb @@ -781,7 +781,7 @@ "id": "63", "metadata": {}, "source": [ - "# Data owner: execute syft_function" + "# Data owner: approve request" ] }, { @@ -937,7 +937,7 @@ }, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "assert isinstance(result, sy.SyftSuccess)" ] }, @@ -946,7 +946,7 @@ "id": "77", "metadata": {}, "source": [ - "# Data scientist: fetch result" + "# Data scientist: compute result" ] }, { @@ -1092,7 +1092,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.8" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb b/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb index f64f8728793..801509179d4 100644 --- a/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb +++ b/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb @@ -836,7 +836,7 @@ "id": "64", "metadata": {}, "source": [ - "# Data owner: execute syft_function" + "# Data owner: approve request" ] }, { @@ -1004,7 +1004,7 @@ }, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "assert isinstance(result, sy.SyftSuccess)" ] }, @@ -1025,7 +1025,7 @@ "id": "80", "metadata": {}, "source": [ - "# Data scientist: fetch result" + "# Data scientist: compute result" ] }, { @@ -1106,7 +1106,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.10.13" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb b/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb index 6d1c11f3153..58b0132bd25 100644 --- a/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb +++ b/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb @@ -787,7 +787,7 @@ "id": "62", "metadata": {}, "source": [ - "# Data owner: execute syft_function" + "# Data owner: approve request" ] }, { @@ -943,7 +943,7 @@ }, "outputs": [], "source": [ - "result = request.accept_by_depositing_result(real_result)\n", + "result = request.approve()\n", "assert isinstance(result, sy.SyftSuccess)" ] }, @@ -953,7 +953,7 @@ "id": "76", "metadata": {}, "source": [ - "# Data Owner: fetch result" + "# Data Owner: compute result" ] }, { @@ -1034,7 +1034,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.10.13" }, "toc": { "base_numbering": 1, From 20707b2fa00a7b27facaf0d400950b43f38d820b Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 30 May 2024 13:35:46 +0200 Subject: [PATCH 015/313] deprecate accept_by_depositing_result, fix non-sync tests --- .../syft/src/syft/service/request/request.py | 217 +----------------- .../request/request_code_accept_deny_test.py | 4 +- 
.../syft/tests/syft/users/user_code_test.py | 10 +- 3 files changed, 15 insertions(+), 216 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 0da2c5dbcc3..e9e92d4f85d 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -37,6 +37,7 @@ from ...types.uid import UID from ...util import options from ...util.colors import SURFACE +from ...util.decorators import deprecated from ...util.markdown import markdown_as_class_with_fields from ...util.notebook_ui.icons import Icon from ...util.util import prompt_warning_message @@ -54,7 +55,6 @@ from ..job.job_stash import JobInfo from ..job.job_stash import JobStatus from ..notification.notifications import Notification -from ..policy.policy import UserPolicy from ..response import SyftError from ..response import SyftSuccess from ..user.user import UserView @@ -842,214 +842,13 @@ def deposit_result( return job - # @deprecated( - # return_syfterror=True, - # reason="accept_by_depositing_result has been removed. Use approve instead to " - # "approve this request, or deposit_result to deposit a new result.", - # ) - def accept_by_depositing_result( - self, result: Any, force: bool = False - ) -> SyftError | SyftSuccess: - # this code is extremely brittle because its a work around that relies on - # the type of request being very specifically tied to code which needs approving - - # Special case for results from Jobs (High-low side async) - if isinstance(result, JobInfo): - job_info = result - if not job_info.includes_result: - return SyftError( - message="JobInfo should not include result. Use sync_job instead." - ) - result = job_info.result - elif isinstance(result, ActionObject): - # Do not allow accepting a result produced by a Job, - # This can cause an inconsistent Job state - action_object_job = self._get_job_from_action_object(result) - if action_object_job is not None: - return SyftError( - message=f"This ActionObject is the result of Job {action_object_job.id}, " - f"please use the `Job.info` instead." - ) - else: - job_info = JobInfo( - includes_metadata=True, - includes_result=True, - status=JobStatus.COMPLETED, - resolved=True, - ) - else: - # NOTE result is added at the end of function (once ActionObject is created) - job_info = JobInfo( - includes_metadata=True, - includes_result=True, - status=JobStatus.COMPLETED, - resolved=True, - ) - - user_code_status_change: UserCodeStatusChange = self.changes[0] - code = user_code_status_change.code - output_history = code.output_history - if isinstance(output_history, SyftError): - return output_history - output_policy = code.output_policy - if isinstance(output_policy, SyftError): - return output_policy - if isinstance(user_code_status_change.code.output_policy_type, UserPolicy): - return SyftError( - message="UserCode uses an user-submitted custom policy. 
Please use .approve()" - ) - - if not user_code_status_change.change_object_is_type(UserCodeStatusCollection): - raise TypeError( - f"accept_by_depositing_result can only be run on {UserCodeStatusCollection} not " - f"{user_code_status_change.linked_obj.object_type}" - ) - if not type(user_code_status_change) == UserCodeStatusChange: - raise TypeError( - f"accept_by_depositing_result can only be run on {UserCodeStatusChange} not " - f"{type(user_code_status_change)}" - ) - - api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key) - if not api: - raise Exception( - f"No access to Syft API. Please login to {self.node_uid} first." - ) - if api.signing_key is None: - raise ValueError(f"{api}'s signing key is None") - is_approved = user_code_status_change.approved - - permission_request = self.approve(approve_nested=True) - if isinstance(permission_request, SyftError): - return permission_request - - job = self._get_latest_or_create_job() - if isinstance(job, SyftError): - return job - - # This weird order is due to the fact that state is None before calling approve - # we could fix it in a future release - if is_approved: - if not force: - return SyftError( - message="Already approved, if you want to force updating the result use force=True" - ) - # TODO: this should overwrite the output history instead - action_obj_id = output_history[0].output_ids[0] # type: ignore - - if not isinstance(result, ActionObject): - action_object = ActionObject.from_obj( - result, - id=action_obj_id, - syft_client_verify_key=api.signing_key.verify_key, - syft_node_location=api.node_uid, - ) - else: - action_object = result - action_object_is_from_this_node = ( - self.syft_node_location == action_object.syft_node_location - ) - if ( - action_object.syft_blob_storage_entry_id is None - or not action_object_is_from_this_node - ): - action_object.reload_cache() - action_object.syft_node_location = self.syft_node_location - action_object.syft_client_verify_key = self.syft_client_verify_key - blob_store_result = action_object._save_to_blob_storage() - if isinstance(blob_store_result, SyftError): - return blob_store_result - result = api.services.action.set(action_object) - if isinstance(result, SyftError): - return result - else: - if not isinstance(result, ActionObject): - action_object = ActionObject.from_obj( - result, - syft_client_verify_key=api.signing_key.verify_key, - syft_node_location=api.node_uid, - ) - else: - action_object = result - - # TODO: proper check for if actionobject is already uploaded - # we also need this for manualy syncing - action_object_is_from_this_node = ( - self.syft_node_location == action_object.syft_node_location - ) - if ( - action_object.syft_blob_storage_entry_id is None - or not action_object_is_from_this_node - ): - action_object.reload_cache() - action_object.syft_node_location = self.syft_node_location - action_object.syft_client_verify_key = self.syft_client_verify_key - blob_store_result = action_object._save_to_blob_storage() - if isinstance(blob_store_result, SyftError): - return blob_store_result - result = api.services.action.set(action_object) - if isinstance(result, SyftError): - return result - - # Do we still need this? 
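The roughly two hundred lines being deleted here were the brittle, type-specific deposit path; its replacement is the much smaller Request.deposit_result introduced earlier in this series. A usage sketch for owners who still need to hand back a manually computed result instead of approving execution (deposit_result returns the created Job on success, otherwise a SyftError):

    request = root_client.requests[-1]
    job = request.deposit_result(real_result, logs="computed offline by the DO")
    if isinstance(job, sy.SyftError):
        print(job.message)
    else:
        print(f"deposited as Job {job.id}")
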
- # policy_state_mutation = ObjectMutation( - # linked_obj=user_code_status_change.linked_obj, - # attr_name="output_policy", - # match_type=True, - # value=output_policy, - # ) - - action_object_link = LinkedObject.from_obj(result, node_uid=self.node_uid) - permission_change = ActionStoreChange( - linked_obj=action_object_link, - apply_permission_type=ActionPermission.READ, - ) - - new_changes = [permission_change] - result_request = api.services.request.add_changes( - uid=self.id, changes=new_changes - ) - if isinstance(result_request, SyftError): - return result_request - self = result_request - - approved = self.approve(disable_warnings=True, approve_nested=True) - if isinstance(approved, SyftError): - return approved - - input_ids = {} - if code.input_policy is not None: - for inps in code.input_policy.inputs.values(): - input_ids.update(inps) - - res = api.services.code.store_execution_output( - user_code_id=code.id, - outputs=result, - job_id=job.id, - input_ids=input_ids, - ) - if isinstance(res, SyftError): - return res - - job_info.result = action_object - job_info.status = ( - JobStatus.ERRORED - if isinstance(action_object.syft_action_data, Err) - else JobStatus.COMPLETED - ) - - existing_result = job.result.id if job.result is not None else None - print( - f"Job({job.id}) Setting new result {existing_result} -> {job_info.result.id}" - ) - job.apply_info(job_info) - - job_service = api.services.job - res = job_service.update(job) - if isinstance(res, SyftError): - return res - - return SyftSuccess(message="Request submitted for updating result.") + @deprecated( + return_syfterror=True, + reason="accept_by_depositing_result has been removed. Use approve instead to " + "approve this request, or deposit_result to deposit a new result.", + ) + def accept_by_depositing_result(self, result: Any, force: bool = False) -> Any: + pass def sync_job( self, job_info: JobInfo, **kwargs: Any diff --git a/packages/syft/tests/syft/request/request_code_accept_deny_test.py b/packages/syft/tests/syft/request/request_code_accept_deny_test.py index e84f0360b12..2cd05c49a9a 100644 --- a/packages/syft/tests/syft/request/request_code_accept_deny_test.py +++ b/packages/syft/tests/syft/request/request_code_accept_deny_test.py @@ -183,13 +183,13 @@ def simple_function(data): assert not isinstance(result, SyftError) request = root_client.requests.get_all()[0] - result = request.accept_by_depositing_result(result=10) + result = request.approve() assert isinstance(result, SyftSuccess) request = root_client.requests.get_all()[0] assert request.status == RequestStatus.APPROVED result = ds_client.code.simple_function(data=action_obj) - assert result.get() == 10 + assert result.get() == sum(dummy_data) result = request.deny(reason="Function output needs differential privacy !!") assert isinstance(result, SyftSuccess) diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py index 53758e3c451..22de6447587 100644 --- a/packages/syft/tests/syft/users/user_code_test.py +++ b/packages/syft/tests/syft/users/user_code_test.py @@ -59,7 +59,7 @@ def test_user_code(worker) -> None: request = message.link user_code = request.changes[0].code result = user_code.unsafe_function() - request.accept_by_depositing_result(result) + request.approve() result = guest_client.api.services.code.mock_syft_func() assert isinstance(result, ActionObject) @@ -310,22 +310,22 @@ def compute_sum(): ds_client.api.services.code.request_code_execution(compute_sum) - # no 
accept_by_depositing_result, no mock execution + # not approved, no mock execution result = ds_client.api.services.code.compute_sum() assert isinstance(result, SyftError) - # no accept_by_depositing_result, mock execution + # not approved, mock execution users[-1].allow_mock_execution() result = ds_client.api.services.code.compute_sum() assert result.get() == 1 - # accept_by_depositing_result, no mock execution + # approved, no mock execution users[-1].allow_mock_execution(allow=False) message = root_domain_client.notifications[-1] request = message.link user_code = request.changes[0].code result = user_code.unsafe_function() - request.accept_by_depositing_result(result) + request.approve() result = ds_client.api.services.code.compute_sum() assert result.get() == 1 From 56803d4ebdb3715ae695152bfd217f45868df55f Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Thu, 30 May 2024 14:53:16 +0200 Subject: [PATCH 016/313] fix share_private_data_for_diff --- packages/syft/src/syft/client/syncing.py | 1 - .../syft/src/syft/service/sync/diff_state.py | 9 ++++----- .../src/syft/service/sync/resolve_widget.py | 19 ++++++------------- 3 files changed, 10 insertions(+), 19 deletions(-) diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index abc2f2faa29..c3f4178fa5b 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -147,7 +147,6 @@ def handle_sync_batch( ) share_private_data_for_diff = share_private_data[diff.object_id] # add sync private option to execution output - share_private_data_for_diff = True mockify_for_diff = mockify[diff.object_id] instruction = SyncInstruction.from_batch_decision( diff=diff, diff --git a/packages/syft/src/syft/service/sync/diff_state.py b/packages/syft/src/syft/service/sync/diff_state.py index f943174d75a..a11ff69a1e3 100644 --- a/packages/syft/src/syft/service/sync/diff_state.py +++ b/packages/syft/src/syft/service/sync/diff_state.py @@ -1537,7 +1537,10 @@ def from_batch_decision( if share_private_data: # or diff.object_type == "Job": if share_to_user is None: # job ran by another user - if not diff.object_type == "Job": + if ( + diff.object_type != "Job" + and diff.object_type != "ExecutionOutput" + ): raise ValueError( "share_to_user is required to share private data" ) @@ -1550,10 +1553,6 @@ def from_batch_decision( ) ] - # TODO move this to the widget - # if widget.has_unused_share_button: - # print("Share button was not used, so we will mockify the object") - # storage permissions new_storage_permissions = [] diff --git a/packages/syft/src/syft/service/sync/resolve_widget.py b/packages/syft/src/syft/service/sync/resolve_widget.py index c12883979bc..b9dadcb319e 100644 --- a/packages/syft/src/syft/service/sync/resolve_widget.py +++ b/packages/syft/src/syft/service/sync/resolve_widget.py @@ -126,11 +126,6 @@ def set_share_private_data(self) -> None: def mockify(self) -> bool: return not self.share_private_data - @property - def has_unused_share_button(self) -> bool: - # does not have share button - return False - @property def share_private_data(self) -> bool: # there are TwinAPIEndpoint.__private_sync_attr_mocks__ @@ -210,26 +205,24 @@ def __init__( direction: SyncDirection, ): self.direction = direction + self.share_private_data = False self.diff: ObjectDiff = diff self.sync: bool = False self.is_main_widget: bool = False + self.has_private_data = isinstance( + self.diff.non_empty_object, SyftLog | ActionObject | TwinAPIEndpoint + ) self.widget = self.build() 
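The resolve-widget change above simplifies the sync decision: any object type that can carry a private payload (logs, action objects, Twin API endpoints) is mocked unless the reviewer explicitly shares it, and the unused-share-button special case goes away. The decision table, restated as a self-contained check:

    def mockify(has_private_data: bool, share_private_data: bool) -> bool:
        # Mirrors ObjectDiffWidget.mockify after this patch.
        return has_private_data and not share_private_data

    assert mockify(True, False) is True    # private, not shared -> sync a mock
    assert mockify(True, True) is False    # private, shared -> sync real data
    assert mockify(False, False) is False  # nothing private -> nothing to mock
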
self.set_and_disable_sync() @property def mockify(self) -> bool: - if isinstance(self.diff.non_empty_object, TwinAPIEndpoint): - return True - if self.has_unused_share_button: + if self.has_private_data and not self.share_private_data: return True else: return False - @property - def has_unused_share_button(self) -> bool: - return self.show_share_button and not self.share_private_data - @property def warning_html(self) -> str: if self.show_share_button: @@ -243,7 +236,7 @@ def warning_html(self) -> str: @property def show_share_button(self) -> bool: - return isinstance(self.diff.non_empty_object, SyftLog | ActionObject) + return self.has_private_data @property def title(self) -> str: From 388e4357d591f6a6013284f2482674766f3e1e39 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Thu, 30 May 2024 15:24:28 +0200 Subject: [PATCH 017/313] WIPPPPPPP --- .../syft/src/syft/service/code/user_code.py | 36 +++++++++++++++++-- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 1dd6446cc24..756c342992b 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -31,7 +31,7 @@ from typing_extensions import Self # relative -from ...abstract_node import NodeType +from ...abstract_node import NodeSideType, NodeType from ...client.api import APIRegistry from ...client.api import NodeIdentity from ...client.enclave_client import EnclaveMetadata @@ -119,9 +119,19 @@ class UserCodeStatusCollection(SyncableSyftObject): __repr_attrs__ = ["approved", "status_dict"] - status_dict: dict[NodeIdentity, tuple[UserCodeStatus, str]] = {} + status_dict_: dict[NodeIdentity, tuple[UserCodeStatus, str]] = {} user_code_link: LinkedObject + @property + def status_dict(self) -> dict[NodeIdentity, tuple[UserCodeStatus, str]]: + return self.status_dict_ + + @status_dict.setter + def status_dict( + self, value: dict[NodeIdentity, tuple[UserCodeStatus, str]] + ) -> None: + self.status_dict_ = value + def syft_get_diffs(self, ext_obj: Any) -> list[AttrDiff]: # relative from ...service.sync.diff_state import AttrDiff @@ -278,6 +288,7 @@ class UserCode(SyncableSyftObject): signature: inspect.Signature status_link: LinkedObject input_kwargs: list[str] + origin_node_side_type: NodeSideType enclave_metadata: EnclaveMetadata | None = None submit_time: DateTime | None = None # tracks if the code calls domain.something, variable is set during parsing @@ -365,7 +376,17 @@ def user(self) -> UserView | SyftError: return api.services.user.get_by_verify_key(self.user_verify_key) @property - def status(self) -> UserCodeStatusCollection | SyftError: + def status(self) -> UserCodeStatusCollection | UserCodeStatus | SyftError: + if self.origin_node_side_type == NodeSideType.LOW_SIDE and self.output_history: + return UserCodeStatus.APPROVED + elif ( + self.origin_node_side_type == NodeSideType.LOW_SIDE + and not self.output_history + ): + return UserCodeStatus.PENDING + + # NOTE: what is the reject condition on L0 setups? 
+ # Clientside only res = self.status_link.resolve return res @@ -1292,6 +1313,14 @@ def set_default_pool_if_empty(context: TransformContext) -> TransformContext: return context +def set_origin_node_side_type(context: TransformContext) -> TransformContext: + if context.node and context.output: + context.output["origin_node_side_type"] = ( + context.node.node_side_type or NodeSideType.HIGH_SIDE + ) + return context + + @transform(SubmitUserCode, UserCode) def submit_user_code_to_user_code() -> list[Callable]: return [ @@ -1307,6 +1336,7 @@ def submit_user_code_to_user_code() -> list[Callable]: add_node_uid_for_key("node_uid"), add_submit_time, set_default_pool_if_empty, + set_origin_node_side_type, ] From 8be2d0565d39e12bce6c57ac97b083fcbb9ff438 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 30 May 2024 18:20:17 +0200 Subject: [PATCH 018/313] remove adding read permissions on deposit_result --- packages/syft/src/syft/service/request/request.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index e9e92d4f85d..13c31fb785b 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -793,7 +793,6 @@ def deposit_result( self, result: Any, logs: str = "", - add_code_owner_read_permissions: bool = True, ) -> Job | SyftError: """ Adds a result to this Request: @@ -825,12 +824,13 @@ def deposit_result( return action_object # Create Job + # NOTE code owner read permissions are added when syncing this Job job = api.services.job.create_job_for_user_code_id( code.id, result=action_object, log_stdout=logs, status=JobStatus.COMPLETED, - add_code_owner_read_permissions=add_code_owner_read_permissions, + add_code_owner_read_permissions=False, ) if isinstance(job, SyftError): return job From 14f992648c3785005cbf5b62341ad27caa335cea Mon Sep 17 00:00:00 2001 From: khoaguin Date: Fri, 31 May 2024 07:33:06 +0700 Subject: [PATCH 019/313] [syft/job] wait for result after the job is errored / resolved / timed out --- .../src/syft/service/action/action_service.py | 8 ------- .../syft/src/syft/service/job/job_stash.py | 23 ++++++++++++++++--- packages/syft/src/syft/service/queue/queue.py | 10 +++++--- 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 9a3ba935113..795e0775a94 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -155,14 +155,6 @@ def is_resolved( # If it's a leaf but not resolved yet, return false elif not obj.syft_resolved: - user_code_service = context.node.get_service("usercodeservice") - if not user_code_service.is_execution_on_owned_args_allowed(context): - return Err( - "You do not have the permissions for mock execution, please contact the admin" - ) - print( - f"inside ActionService.is_resolved (3). {result = }. {obj.syft_resolved = }" - ) return Ok(False) print(f"inside ActionService.is_resolved (4). 
{obj = }.") diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 1a0e5cd2bb8..4625dd0f7d1 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -659,13 +659,17 @@ def wait( if self.resolved: return self.resolve - if not job_only and self.result is not None: - self.result.wait(timeout) - print_warning = True counter = 0 while True: self.fetch() + print(f"{self.resolved = }. {self.status = }. {self.result = }") + if isinstance(self.result, Err): + return SyftError( + message=f"Waiting for job with id '{self.id}' failed with error: {self.result.err()}" + ) + if isinstance(self.result, SyftError): + return self.result if print_warning and self.result is not None: result_obj = api.services.action.get( self.result.id, resolve_nested=False @@ -685,6 +689,19 @@ def wait( counter += 1 if counter > timeout: return SyftError(message="Reached Timeout!") + + # TODO: if self.resolve is error, return SyftError and not wait for the result + # should we wait on the job first, and then wait on the result + # now we are waiting for the result before the job is resolved + # if a job is error, we should return it and not wait for the result + # if a job is completed, then we should we for the result + if not job_only and self.result is not None: # type: ignore[unreachable] + print(f"Waiting for result of job with id '{self.id}'") + self.result.wait(timeout) + + print( + f"Job with id '{self.id}' is resolved with {self.resolve = }, {self.result = }" + ) return self.resolve # type: ignore[unreachable] @property diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index 968e4b7c975..025c139663f 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -184,7 +184,9 @@ def handle_message_multiprocessing( try: call_method = getattr(worker.get_service(queue_item.service), queue_item.method) - + logger.info( + f"Calling {queue_item.service}.{queue_item.method} with {queue_item.args}" + ) role = worker.get_role_for_credentials(credentials=credentials) context = AuthedServiceContext( @@ -205,7 +207,7 @@ def handle_message_multiprocessing( ) result: Any = call_method(context, *queue_item.args, **queue_item.kwargs) - + logger.info(f"Result: {result}") status = Status.COMPLETED job_status = JobStatus.COMPLETED @@ -227,7 +229,7 @@ def handle_message_multiprocessing( job_status = JobStatus.ERRORED # stdlib - raise e + logger.error(f"Error while handle message multiprocessing: {e}") # result = SyftError( # message=f"Failed with exception: {e}, {traceback.format_exc()}" # ) @@ -247,6 +249,8 @@ def handle_message_multiprocessing( job_item.resolved = True job_item.status = job_status + logger.info(f"{job_item.__dict__}") + worker.queue_stash.set_result(credentials, queue_item) worker.job_stash.set_result(credentials, job_item) From dce03d5aea2cecc356f54f0b424f22b5720de948 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Fri, 31 May 2024 15:18:23 +0200 Subject: [PATCH 020/313] update computed status + deny --- packages/syft/src/syft/client/client.py | 55 -------- .../syft/src/syft/service/code/user_code.py | 117 +++++++++++++----- .../syft/service/code/user_code_service.py | 28 +++++ .../src/syft/service/output/output_service.py | 36 ++++++ .../syft/src/syft/service/request/request.py | 49 +++++--- 5 files changed, 186 insertions(+), 99 deletions(-) diff --git 
a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index ba4dfc38c80..4e54a472dd8 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -4,7 +4,6 @@ # stdlib import base64 from collections.abc import Callable -from copy import deepcopy from enum import Enum from getpass import getpass import json @@ -561,60 +560,6 @@ def create_project( project = project_create.send() return project - # TODO: type of request should be REQUEST, but it will give circular import error - def sync_code_from_request(self, request: Any) -> SyftSuccess | SyftError: - # relative - from ..service.code.user_code import UserCode - from ..service.code.user_code import UserCodeStatusCollection - from ..store.linked_obj import LinkedObject - - code: UserCode | SyftError = request.code - if isinstance(code, SyftError): - return code - - code = deepcopy(code) - code.node_uid = self.id - code.user_verify_key = self.verify_key - - def get_nested_codes(code: UserCode) -> list[UserCode]: - result: list[UserCode] = [] - if code.nested_codes is None: - return result - - for _, (linked_code_obj, _) in code.nested_codes.items(): - nested_code = linked_code_obj.resolve - nested_code = deepcopy(nested_code) - nested_code.node_uid = code.node_uid - nested_code.user_verify_key = code.user_verify_key - result.append(nested_code) - result += get_nested_codes(nested_code) - - return result - - def get_code_statusses(codes: list[UserCode]) -> list[UserCodeStatusCollection]: - statusses = [] - for code in codes: - status = deepcopy(code.status) - statusses.append(status) - code.status_link = LinkedObject.from_obj(status, node_uid=code.node_uid) - return statusses - - nested_codes = get_nested_codes(code) - statusses = get_code_statusses(nested_codes + [code]) - - for c in nested_codes + [code]: - res = self.code.submit(c) - if isinstance(res, SyftError): - return res - - for status in statusses: - res = self.api.services.code_status.create(status) - if isinstance(res, SyftError): - return res - - self._fetch_api(self.credentials) - return SyftSuccess(message="User Code Submitted") - @property def authed(self) -> bool: return bool(self.credentials) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 756c342992b..be563394ac9 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -31,7 +31,8 @@ from typing_extensions import Self # relative -from ...abstract_node import NodeSideType, NodeType +from ...abstract_node import NodeSideType +from ...abstract_node import NodeType from ...client.api import APIRegistry from ...client.api import NodeIdentity from ...client.enclave_client import EnclaveMetadata @@ -42,6 +43,7 @@ from ...store.document_store import PartitionKey from ...store.linked_obj import LinkedObject from ...types.datetime import DateTime +from ...types.syft_object import PartialSyftObject from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_4 @@ -119,19 +121,9 @@ class UserCodeStatusCollection(SyncableSyftObject): __repr_attrs__ = ["approved", "status_dict"] - status_dict_: dict[NodeIdentity, tuple[UserCodeStatus, str]] = {} + status_dict: dict[NodeIdentity, tuple[UserCodeStatus, str]] = {} user_code_link: LinkedObject - @property - def status_dict(self) -> dict[NodeIdentity, tuple[UserCodeStatus, str]]: - return 
self.status_dict_ - - @status_dict.setter - def status_dict( - self, value: dict[NodeIdentity, tuple[UserCodeStatus, str]] - ) -> None: - self.status_dict_ = value - def syft_get_diffs(self, ext_obj: Any) -> list[AttrDiff]: # relative from ...service.sync.diff_state import AttrDiff @@ -286,15 +278,15 @@ class UserCode(SyncableSyftObject): user_unique_func_name: str code_hash: str signature: inspect.Signature - status_link: LinkedObject + status_link: LinkedObject | None = None input_kwargs: list[str] - origin_node_side_type: NodeSideType enclave_metadata: EnclaveMetadata | None = None submit_time: DateTime | None = None - # tracks if the code calls domain.something, variable is set during parsing - uses_domain: bool = False + uses_domain: bool = False # tracks if the code calls domain.something, variable is set during parsing nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {} worker_pool_name: str | None = None + origin_node_side_type: NodeSideType + l0_deny_reason: str | None = None __table_coll_widths__ = [ "min-content", @@ -363,6 +355,14 @@ def _coll_repr_(self) -> dict[str, Any]: "Submit time": str(self.submit_time), } + @property + def is_low_side(self) -> bool: + return self.origin_node_side_type == NodeSideType.LOW_SIDE + + @property + def is_high_side(self) -> bool: + return self.origin_node_side_type == NodeSideType.HIGH_SIDE + @property def user(self) -> UserView | SyftError: api = APIRegistry.api_for( @@ -375,25 +375,74 @@ def user(self) -> UserView | SyftError: ) return api.services.user.get_by_verify_key(self.user_verify_key) - @property - def status(self) -> UserCodeStatusCollection | UserCodeStatus | SyftError: - if self.origin_node_side_type == NodeSideType.LOW_SIDE and self.output_history: - return UserCodeStatus.APPROVED - elif ( - self.origin_node_side_type == NodeSideType.LOW_SIDE - and not self.output_history - ): - return UserCodeStatus.PENDING + def _status_from_output_history( + self, context: AuthedServiceContext | None = None + ) -> UserCodeStatusCollection | SyftError: + if context is None: + # Clientside + api = self._get_api() + if isinstance(api, SyftError): + return api + node_identity = NodeIdentity.from_api(api) + is_approved = api.output.has_output_read_permissions( + self.id, self.user_verify_key + ) + else: + # Serverside + node_identity = NodeIdentity.from_node(context.node) + output_service = context.node.get_service("outputservice") + is_approved = output_service.has_output_read_permissions( + context, self.id, self.user_verify_key + ) - # NOTE: what is the reject condition on L0 setups? + is_denied = self.l0_deny_reason is not None + + if is_denied: + message = self.l0_deny_reason + status = (UserCodeStatus.DENIED, message) + elif is_approved: + status = (UserCodeStatus.APPROVED, "") + else: + status = (UserCodeStatus.PENDING, "") + status_dict = {node_identity: status} + + return UserCodeStatusCollection( + status_dict=status_dict, + user_code_link=LinkedObject.from_obj(self), + ) + @property + def status(self) -> UserCodeStatusCollection | SyftError: # Clientside only + + if self.is_low_side: + if self.status_link is not None: + return SyftError( + message="Encountered a low side UserCode object with a status_link." + ) + return self._status_from_output_history() + + if self.status_link is None: + return SyftError( + message="This UserCode does not have a status. Please contact the Admin." 
+ ) res = self.status_link.resolve return res def get_status( self, context: AuthedServiceContext ) -> UserCodeStatusCollection | SyftError: + if self.origin_node_side_type == NodeSideType.LOW_SIDE: + if self.status_link is not None: + return SyftError( + message="Encountered a low side UserCode object with a status_link." + ) + return self._status_from_output_history(context) + if self.status_link is None: + return SyftError( + message="This UserCode does not have a status. Please contact the Admin." + ) + status = self.status_link.resolve_with_context(context) if status.is_err(): return SyftError(message=status.err()) @@ -782,6 +831,13 @@ def show_code_cell(self) -> None: ip.set_next_input(warning_message + self.raw_code) +class UserCodeUpdate(PartialSyftObject): + __canonical_name__ = "UserCodeUpdate" + __version__ = SYFT_OBJECT_VERSION_1 + + l0_deny_reason: str | None = None + + @serializable(without=["local_function"]) class SubmitUserCode(SyftObject): # version @@ -890,10 +946,7 @@ def _ephemeral_node_call( n_consumers=n_consumers, deploy_to="python", ) - ep_client = ep_node.login( - email="info@openmined.org", - password="changethis", - ) # nosec + ep_client = ep_node.login(email="info@openmined.org", password="changethis") # nosec self.input_policy_init_kwargs = cast(dict, self.input_policy_init_kwargs) for node_id, obj_dict in self.input_policy_init_kwargs.items(): # api = APIRegistry.api_for( @@ -1252,6 +1305,10 @@ def create_code_status(context: TransformContext) -> TransformContext: if context.output is None: return context + # Low side requests have a computed status + if context.node.node_side_type == NodeSideType.LOW_SIDE: + return context + input_keys = list(context.output["input_policy_init_kwargs"].keys()) code_link = LinkedObject.from_uid( context.output["id"], diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 0ccf29cffde..2141806599f 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -15,6 +15,7 @@ from ...store.document_store import DocumentStore from ...store.linked_obj import LinkedObject from ...types.cache_object import CachedSyftObject +from ...types.syft_metaclass import Empty from ...types.twin_object import TwinObject from ...types.uid import UID from ...util.telemetry import instrument @@ -43,6 +44,7 @@ from .user_code import SubmitUserCode from .user_code import UserCode from .user_code import UserCodeStatus +from .user_code import UserCodeUpdate from .user_code import load_approved_policy_code from .user_code_stash import UserCodeStash @@ -76,6 +78,32 @@ def _submit( result = self.stash.set(context.credentials, code) return result + @service_method( + path="code.update", + name="update", + roles=ADMIN_ROLE_LEVEL, + autosplat=["code_update"], + ) + def update( + self, + context: AuthedServiceContext, + code_update: UserCodeUpdate, + ) -> SyftSuccess | SyftError: + code = self.stash.get_by_uid(context.credentials, code_update.id) + + result = self.stash.update(context.credentials, code) + if result.is_err(): + return SyftError(message=str(result.err())) + + if code_update.l0_deny_reason is not Empty: + code.l0_deny_reason = code_update.l0_deny_reason + + result = self.stash.update(context.credentials, code) + + if result.is_ok(): + return result.ok() + return SyftError(message=str(result.err())) + @service_method(path="code.delete", name="delete", roles=ADMIN_ROLE_LEVEL) def delete( self, 
context: AuthedServiceContext, uid: UID diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py index 3d32a3e622a..c74d7e8658a 100644 --- a/packages/syft/src/syft/service/output/output_service.py +++ b/packages/syft/src/syft/service/output/output_service.py @@ -1,5 +1,6 @@ # stdlib from typing import ClassVar +from typing import cast # third party from pydantic import model_validator @@ -23,6 +24,8 @@ from ...types.uid import UID from ...util.telemetry import instrument from ..action.action_object import ActionObject +from ..action.action_permissions import ActionObjectREAD +from ..action.action_service import ActionService from ..context import AuthedServiceContext from ..response import SyftError from ..service import AbstractService @@ -297,6 +300,39 @@ def get_by_user_code_id( return result.ok() return SyftError(message=result.err()) + @service_method( + path="output.has_output_read_permissions", + name="has_output_read_permissions", + roles=GUEST_ROLE_LEVEL, + ) + def has_output_read_permissions( + self, + context: AuthedServiceContext, + user_code_id: UID, + code_owner_verify_key: SyftVerifyKey, + ) -> bool: + action_service = cast(ActionService, context.node.get_service("actionservice")) + all_outputs = self.get_by_user_code_id(context, user_code_id) + if isinstance(all_outputs, SyftError): + return False + for output in all_outputs: + # Check if this output has permissions + if not self.stash.has_permission( + ActionObjectREAD(uid=output.id, credentials=code_owner_verify_key) + ): + continue + + # Check if all output ActionObjects have permissions + result_ids = output.output_id_list + permissions = [ + ActionObjectREAD(uid=_id, credentials=code_owner_verify_key) + for _id in result_ids + ] + if action_service.store.has_permissions(permissions): + return True + + return False + @service_method( path="output.get_by_job_id", name="get_by_job_id", diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index dbb9f9811a7..4cd9371d1fd 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -549,8 +549,15 @@ def icon(self) -> str: @property def status(self) -> RequestStatus: - if self.code.output_history: # Node side type? - return RequestStatus.APPROVED + if self.code.is_low_side: + code_status = self.code.status + if code_status == UserCodeStatus.PENDING: + return RequestStatus.PENDING + elif code_status == UserCodeStatus.DENIED: + return RequestStatus.REJECTED + elif code_status == UserCodeStatus.APPROVED: + return RequestStatus.APPROVED + if len(self.history) == 0: return RequestStatus.PENDING @@ -570,12 +577,14 @@ def approve( approve_nested: bool = False, **kwargs: dict, ) -> Result[SyftSuccess, SyftError]: - api = APIRegistry.api_for( - self.node_uid, - self.syft_client_verify_key, - ) - if api is None: - return SyftError(message=f"api is None. You must login to {self.node_uid}") + api = self._get_api() + if isinstance(api, SyftError): + return api + + if self.code.is_low_side: + return SyftError( + message="This request is a low-side request. Please sync your results to approve." + ) # TODO: Refactor so that object can also be passed to generate warnings if api.connection: metadata = api.connection.get_node_metadata(api.signing_key) @@ -619,15 +628,27 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: Args: reason (str): Reason for which the request has been denied. 
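        Example (illustrative; the reason string mirrors the updated request
        tests earlier in this series):
            >>> request.deny(reason="Function output needs differential privacy !!")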
""" - api = APIRegistry.api_for( - self.node_uid, - self.syft_client_verify_key, - ) - if api is None: - return SyftError(message=f"api is None. You must login to {self.node_uid}") + if reason is None: + return SyftError("Please provide a reason for denying the request.") + + api = self._get_api() + if isinstance(api, SyftError): + return api + + if self.code.is_low_side: + result = api.code.update(l0_deny_reason=reason) + if isinstance(result, SyftError): + return result + return SyftSuccess(message=f"Request denied with reason: {reason}") + return api.services.request.undo(uid=self.id, reason=reason) def approve_with_client(self, client: SyftClient) -> Result[SyftSuccess, SyftError]: + if self.code.is_low_side: + return SyftError( + message="This request is a low-side request. Please sync your results to approve." + ) + print(f"Approving request for domain {client.name}") return client.api.services.request.apply(self.id) From eb477fd2e6f02095b9928455d30d7cd7a8660eb0 Mon Sep 17 00:00:00 2001 From: dk Date: Mon, 3 Jun 2024 10:45:11 +0700 Subject: [PATCH 021/313] [syft/job] add some error handlings to `ActionObject.wait` and `Job.wait` --- .../src/syft/service/action/action_object.py | 12 ++++--- .../syft/src/syft/service/job/job_stash.py | 31 ++++++++++++++----- 2 files changed, 31 insertions(+), 12 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 29402d09d4b..e8cc1478611 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -1403,11 +1403,15 @@ def wait(self, timeout: int | None = None) -> ActionObject | SyftError: counter = 0 while api: - obj_resolved = api.services.action.is_resolved(obj_id) - print(f"inside ActionObject.wait. {obj_resolved = }. {type(obj_resolved)}") - if "You do not have the permissions for mock execution" in obj_resolved: + obj_resolved: bool | str = api.services.action.is_resolved(obj_id) + print( + f"inside ActionObject.wait. {obj_resolved = }. {type(obj_resolved) = }" + ) + if isinstance(obj_resolved, str): return SyftError(message=obj_resolved) - if not obj_resolved: + if obj_resolved == True: + break + if obj_resolved == False: time.sleep(1) if timeout is not None: counter += 1 diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 4625dd0f7d1..1ef9965f81b 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -663,13 +663,28 @@ def wait( counter = 0 while True: self.fetch() + print(f"{self.resolved = }. {self.status = }. 
{self.result = }") - if isinstance(self.result, Err): - return SyftError( - message=f"Waiting for job with id '{self.id}' failed with error: {self.result.err()}" - ) - if isinstance(self.result, SyftError): - return self.result + + if self.resolved: + if isinstance(self.result, Err): + return SyftError( + message=f"Waiting for job with id '{self.id}' failed with error: {self.result.err()}" + ) + if isinstance(self.result, SyftError): + return SyftError( + message=f"Waiting for job with id '{self.id}' failed with error: {self.result.message}" + ) + if isinstance(self.result, ActionObject) and isinstance(self.result.syft_action_data, Err): + return SyftError( + message=f"Waiting for job with id '{self.id}' failed with error: {self.result.syft_action_data.err()}" + ) + if isinstance(self.result, ActionObject) and isinstance(self.result.syft_action_data, SyftError): + return SyftError( + message=f"Waiting for job with id '{self.id}' failed with error: {self.result.syft_action_data.message}" + ) + break # type: ignore[unreachable] + if print_warning and self.result is not None: result_obj = api.services.action.get( self.result.id, resolve_nested=False @@ -681,9 +696,9 @@ def wait( "Use job.wait().get() instead to wait for the linked result." ) print_warning = False + sleep(1) - if self.resolved: - break # type: ignore[unreachable] + # TODO: fix the mypy issue if timeout is not None: counter += 1 From 60c23db6c66f05806e0af44e9c9271633a209799 Mon Sep 17 00:00:00 2001 From: khoaguin Date: Mon, 3 Jun 2024 16:07:45 +0700 Subject: [PATCH 022/313] [syft/job] if a job errors out or can't be executed (e.g. due to not having permission), return `Err` for `job.result` and wrap it with `SyftError` together with the stack trace for `job.wait()` - remove debug prints Co-authored-by: Shubham Gupta --- .../src/syft/service/action/action_object.py | 10 +++---- .../syft/src/syft/service/job/job_stash.py | 26 +++++-------------- packages/syft/src/syft/service/queue/queue.py | 6 +---- 3 files changed, 11 insertions(+), 31 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 6acbc1dc7e6..ea37b63ed0c 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -1247,9 +1247,10 @@ def get(self, block: bool = False) -> Any: self.wait() res = self.refresh_object() - if not isinstance(res, ActionObject): return SyftError(message=f"{res}") # type: ignore + elif issubclass(res.syft_action_data_type, Err): + return SyftError(message=f"{res.syft_action_data.err()}") else: if not self.has_storage_permission(): prompt_warning_message( @@ -1403,14 +1404,11 @@ def wait(self, timeout: int | None = None) -> ActionObject | SyftError: counter = 0 while api: obj_resolved: bool | str = api.services.action.is_resolved(obj_id) - print( - f"inside ActionObject.wait. {obj_resolved = }. 
{type(obj_resolved) = }" - ) if isinstance(obj_resolved, str): return SyftError(message=obj_resolved) - if obj_resolved == True: + if obj_resolved: break - if obj_resolved == False: + if not obj_resolved: time.sleep(1) if timeout is not None: counter += 1 diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 1ef9965f81b..928beaf850e 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -663,11 +663,8 @@ def wait( counter = 0 while True: self.fetch() - - print(f"{self.resolved = }. {self.status = }. {self.result = }") - if self.resolved: - if isinstance(self.result, Err): + if isinstance(self.result, Err): # type: ignore[unreachable] return SyftError( message=f"Waiting for job with id '{self.id}' failed with error: {self.result.err()}" ) @@ -675,15 +672,7 @@ def wait( return SyftError( message=f"Waiting for job with id '{self.id}' failed with error: {self.result.message}" ) - if isinstance(self.result, ActionObject) and isinstance(self.result.syft_action_data, Err): - return SyftError( - message=f"Waiting for job with id '{self.id}' failed with error: {self.result.syft_action_data.err()}" - ) - if isinstance(self.result, ActionObject) and isinstance(self.result.syft_action_data, SyftError): - return SyftError( - message=f"Waiting for job with id '{self.id}' failed with error: {self.result.syft_action_data.message}" - ) - break # type: ignore[unreachable] + break if print_warning and self.result is not None: result_obj = api.services.action.get( @@ -696,20 +685,17 @@ def wait( "Use job.wait().get() instead to wait for the linked result." ) print_warning = False - + sleep(1) - # TODO: fix the mypy issue if timeout is not None: counter += 1 if counter > timeout: return SyftError(message="Reached Timeout!") - # TODO: if self.resolve is error, return SyftError and not wait for the result - # should we wait on the job first, and then wait on the result - # now we are waiting for the result before the job is resolved - # if a job is error, we should return it and not wait for the result - # if a job is completed, then we should we for the result + # if self.resolve returns self.result as error, then we + # return SyftError and not wait for the result + # if a job is completed and not errored out, we would wait for the result if not job_only and self.result is not None: # type: ignore[unreachable] print(f"Waiting for result of job with id '{self.id}'") self.result.wait(timeout) diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index 025c139663f..8c0cc72fd51 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -207,7 +207,6 @@ def handle_message_multiprocessing( ) result: Any = call_method(context, *queue_item.args, **queue_item.kwargs) - logger.info(f"Result: {result}") status = Status.COMPLETED job_status = JobStatus.COMPLETED @@ -218,6 +217,7 @@ def handle_message_multiprocessing( ): status = Status.ERRORED job_status = JobStatus.ERRORED + result = result.syft_action_data elif isinstance(result, SyftError) or isinstance(result, Err): status = Status.ERRORED job_status = JobStatus.ERRORED @@ -230,10 +230,6 @@ def handle_message_multiprocessing( # stdlib logger.error(f"Error while handle message multiprocessing: {e}") - # result = SyftError( - # message=f"Failed with exception: {e}, {traceback.format_exc()}" - # ) - # print("HAD AN ERROR WHILE HANDLING 
MESSAGE", result.message) queue_item.result = result queue_item.resolved = True From cd78b3c0382838274f6c26321b43a2de19724587 Mon Sep 17 00:00:00 2001 From: khoaguin Date: Mon, 3 Jun 2024 16:39:25 +0700 Subject: [PATCH 023/313] [cleanup] remove debug statements --- packages/syft/src/syft/service/action/action_service.py | 3 --- packages/syft/src/syft/service/code/user_code_service.py | 5 ----- packages/syft/src/syft/service/job/job_stash.py | 6 +----- packages/syft/src/syft/service/queue/queue.py | 5 ----- 4 files changed, 1 insertion(+), 18 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 795e0775a94..6c576379594 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -146,7 +146,6 @@ def is_resolved( result = self.resolve_links( context, obj.syft_action_data.action_object_id.id ) - print(f"inside ActionService.is_resolved (2). {result = }") # Checking in case any error occurred if result.is_err(): return result @@ -157,10 +156,8 @@ def is_resolved( elif not obj.syft_resolved: return Ok(False) - print(f"inside ActionService.is_resolved (4). {obj = }.") # If it's not an action data link or non resolved (empty). It's resolved return Ok(True) - print(f"inside ActionService.is_resolved (5). {result = }.") # If it's not in the store or permission error, return the error return result diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index a281ce12bfa..78a7c1c3170 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -350,11 +350,6 @@ def is_execution_allowed( else: return True - @service_method( - path="code.is_execution_on_owned_args_allowed", - name="is_execution_on_owned_args_allowed", - roles=GUEST_ROLE_LEVEL, - ) def is_execution_on_owned_args_allowed( self, context: AuthedServiceContext ) -> bool | SyftError: diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 928beaf850e..7e60d0813be 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -695,14 +695,10 @@ def wait( # if self.resolve returns self.result as error, then we # return SyftError and not wait for the result - # if a job is completed and not errored out, we would wait for the result + # otherwise if a job is resolved and not errored out, we wait for the result if not job_only and self.result is not None: # type: ignore[unreachable] - print(f"Waiting for result of job with id '{self.id}'") self.result.wait(timeout) - print( - f"Job with id '{self.id}' is resolved with {self.resolve = }, {self.result = }" - ) return self.resolve # type: ignore[unreachable] @property diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index 8c0cc72fd51..c3a00e4528a 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -184,9 +184,6 @@ def handle_message_multiprocessing( try: call_method = getattr(worker.get_service(queue_item.service), queue_item.method) - logger.info( - f"Calling {queue_item.service}.{queue_item.method} with {queue_item.args}" - ) role = worker.get_role_for_credentials(credentials=credentials) context = AuthedServiceContext( @@ -245,8 +242,6 @@ def 
handle_message_multiprocessing( job_item.resolved = True job_item.status = job_status - logger.info(f"{job_item.__dict__}") - worker.queue_stash.set_result(credentials, queue_item) worker.job_stash.set_result(credentials, job_item) From 9e27535821072845d4a1ea199a584ff5b9e30243 Mon Sep 17 00:00:00 2001 From: khoaguin Date: Mon, 3 Jun 2024 17:17:40 +0700 Subject: [PATCH 024/313] [syft/test] fix `test_sync_with_error` according to changes to `ActionObject` --- .../syft/tests/syft/service/sync/sync_resolve_single_test.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py index 07585c6de87..b77d91412ae 100644 --- a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py +++ b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py @@ -1,5 +1,4 @@ # third party -from result import Err # syft absolute import syft @@ -136,7 +135,7 @@ def compute() -> int: client_low_ds.refresh() res = client_low_ds.code.compute(blocking=True) - assert isinstance(res.get(), Err) + assert isinstance(res.get(), SyftError) def test_ignore_unignore_single(low_worker, high_worker): From f47a4c85c70c688a32aeec21b77ddba1d7f66235 Mon Sep 17 00:00:00 2001 From: Julian Cardonnet Date: Mon, 3 Jun 2024 09:57:44 -0300 Subject: [PATCH 025/313] Add node description field on node metadata --- packages/syft/src/syft/service/settings/settings.py | 4 +++- packages/syft/src/syft/service/settings/settings_service.py | 1 + packages/syft/src/syft/util/schema.py | 1 + 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/settings/settings.py b/packages/syft/src/syft/service/settings/settings.py index 94adfbf307c..2d2a0a0f5f5 100644 --- a/packages/syft/src/syft/service/settings/settings.py +++ b/packages/syft/src/syft/service/settings/settings.py @@ -82,6 +82,7 @@ class NodeSettings(SyftObject): __repr_attrs__ = [ "name", "organization", + "description", "deployed_on", "signup_enabled", "admin_email", @@ -93,7 +94,7 @@ class NodeSettings(SyftObject): organization: str = "OpenMined" verify_key: SyftVerifyKey on_board: bool = True - description: str = "Text" + description: str = "This is the default description for a Domain Node." node_type: NodeType = NodeType.DOMAIN signup_enabled: bool admin_email: str @@ -116,6 +117,7 @@ def _repr_html_(self) -> Any:

Id: {self.id}
Name: {self.name}
Organization: {self.organization}
+ Description: {self.description}
Deployed on: {self.deployed_on}
Signup enabled: {self.signup_enabled}
Admin email: {self.admin_email}

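For illustration, a minimal sketch of reading the new description back from a
client; this assumes a running local dev domain, the default dev credentials,
and that the client-side metadata object exposes the field:

    # Sketch only: verify the description added by this patch (setup assumed).
    import syft as sy

    client = sy.login(
        url="http://localhost:8080",  # assumed local dev domain
        email="info@openmined.org",
        password="changethis",
    )
    # Default after this patch:
    # "This is the default description for a Domain Node."
    print(client.metadata.description)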
diff --git a/packages/syft/src/syft/service/settings/settings_service.py b/packages/syft/src/syft/service/settings/settings_service.py index 35ef9262860..f404c498662 100644 --- a/packages/syft/src/syft/service/settings/settings_service.py +++ b/packages/syft/src/syft/service/settings/settings_service.py @@ -353,6 +353,7 @@ def welcome_show( FONT_CSS=FONT_CSS, grid_symbol=load_png_base64("small-grid-symbol-logo.png"), domain_name=context.node.name, + description=context.node.metadata.description, # node_url='http://testing:8080', node_type=context.node.metadata.node_type.capitalize(), node_side_type=node_side_type, diff --git a/packages/syft/src/syft/util/schema.py b/packages/syft/src/syft/util/schema.py index f918ed0d4af..fc4003267c3 100644 --- a/packages/syft/src/syft/util/schema.py +++ b/packages/syft/src/syft/util/schema.py @@ -59,6 +59,7 @@

Welcome to $domain_name

URL: $node_url
+ Node Description: $description
Node Type: $node_type
Node Side Type: $node_side_type
Syft Version: $node_version
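The $-prefixed fields above are filled in when the welcome banner is rendered;
welcome_show() in settings_service.py passes keyword arguments with matching
names. A minimal sketch of that substitution, assuming stdlib string.Template
semantics for the placeholders:

    from string import Template

    # Sketch only: mirrors the banner placeholders above.
    banner = Template(
        "Welcome to $domain_name\n"
        "URL: $node_url\n"
        "Node Description: $description\n"
        "Node Type: $node_type\n"
        "Node Side Type: $node_side_type\n"
        "Syft Version: $node_version"
    )
    print(
        banner.safe_substitute(
            domain_name="test-domain",
            node_url="http://localhost:8080",
            description="This is the default description for a Domain Node.",
            node_type="Domain",
            node_side_type="high side",
            node_version="0.8.7",
        )
    )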
From 3cf81584967668840b82debaf49b0fd05f6ad3c1 Mon Sep 17 00:00:00 2001 From: dk Date: Tue, 4 Jun 2024 10:23:04 +0700 Subject: [PATCH 026/313] [syft/test] change `test_function_error` test according to new changes in `ActionObject` --- packages/syft/src/syft/service/action/action_object.py | 2 +- tests/integration/local/twin_api_sync_test.py | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index ea37b63ed0c..1fcb3e27e19 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -1212,7 +1212,7 @@ def get_from(self, client: SyftClient) -> Any: else: return res.syft_action_data - def refresh_object(self, resolve_nested: bool = True) -> ActionObject: + def refresh_object(self, resolve_nested: bool = True) -> ActionObject | SyftError: # relative from ...client.api import APIRegistry diff --git a/tests/integration/local/twin_api_sync_test.py b/tests/integration/local/twin_api_sync_test.py index e09c82001d1..ff6699b8edc 100644 --- a/tests/integration/local/twin_api_sync_test.py +++ b/tests/integration/local/twin_api_sync_test.py @@ -11,6 +11,7 @@ from syft.client.domain_client import DomainClient from syft.client.syncing import compare_clients from syft.client.syncing import resolve +from syft.service.action.action_object import ActionObject from syft.service.job.job_stash import JobStatus from syft.service.response import SyftError from syft.service.response import SyftSuccess @@ -148,10 +149,12 @@ def compute_sum(): ds_client.api.services.code.request_code_execution(compute_sum) users[-1].allow_mock_execution() + import pdb; pdb.set_trace() result = ds_client.api.services.code.compute_sum(blocking=True) - assert isinstance(result.get(), Err) + assert isinstance(result, ActionObject) + assert isinstance(result.get(), SyftError) job_info = ds_client.api.services.code.compute_sum(blocking=False) result = job_info.wait(timeout=10) - assert isinstance(result.get(), Err) + assert isinstance(result, SyftError) assert job_info.status == JobStatus.ERRORED From c7817bfb40250f1273ae7bf5ab1b72bf2aadac2c Mon Sep 17 00:00:00 2001 From: dk Date: Tue, 4 Jun 2024 10:27:05 +0700 Subject: [PATCH 027/313] remove import pdb --- tests/integration/local/twin_api_sync_test.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/integration/local/twin_api_sync_test.py b/tests/integration/local/twin_api_sync_test.py index ff6699b8edc..f13fd8ac76e 100644 --- a/tests/integration/local/twin_api_sync_test.py +++ b/tests/integration/local/twin_api_sync_test.py @@ -3,7 +3,6 @@ # third party import pytest -from result import Err # syft absolute import syft @@ -149,7 +148,6 @@ def compute_sum(): ds_client.api.services.code.request_code_execution(compute_sum) users[-1].allow_mock_execution() - import pdb; pdb.set_trace() result = ds_client.api.services.code.compute_sum(blocking=True) assert isinstance(result, ActionObject) assert isinstance(result.get(), SyftError) From 30a9d71b7e41f9232e9e1952a55de7c9cb90eec3 Mon Sep 17 00:00:00 2001 From: khoaguin Date: Tue, 4 Jun 2024 13:42:30 +0700 Subject: [PATCH 028/313] [syft] make errored results from non-blocking jobs again an `ActionObject` - change tests accordingly Co-authored-by: Shubham Gupta --- packages/syft/src/syft/service/queue/queue.py | 1 - tests/integration/local/twin_api_sync_test.py | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index c3a00e4528a..e8e755d450f 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -214,7 +214,6 @@ def handle_message_multiprocessing( ): status = Status.ERRORED job_status = JobStatus.ERRORED - result = result.syft_action_data elif isinstance(result, SyftError) or isinstance(result, Err): status = Status.ERRORED job_status = JobStatus.ERRORED diff --git a/tests/integration/local/twin_api_sync_test.py b/tests/integration/local/twin_api_sync_test.py index f13fd8ac76e..c19ed2fa022 100644 --- a/tests/integration/local/twin_api_sync_test.py +++ b/tests/integration/local/twin_api_sync_test.py @@ -154,5 +154,6 @@ def compute_sum(): job_info = ds_client.api.services.code.compute_sum(blocking=False) result = job_info.wait(timeout=10) - assert isinstance(result, SyftError) + assert isinstance(result, ActionObject) + assert isinstance(result.get(), SyftError) assert job_info.status == JobStatus.ERRORED From b807c551843134bb7c414f76fba32b687fb40227 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 07:29:21 +0200 Subject: [PATCH 029/313] Update api.py --- packages/syft/src/syft/client/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index c40677221e2..7d119b3d20b 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -740,7 +740,7 @@ def debox_signed_syftapicall_response( def downgrade_signature(signature: Signature, object_versions: dict) -> Signature: migrated_parameters = [] - for _, parameter in signature.parameters.items(): + for parameter in signature.parameters.values(): annotation = unwrap_and_migrate_annotation( parameter.annotation, object_versions ) From d45e531e4d6f44607fd4a967a392d34662851815 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 08:10:48 +0200 Subject: [PATCH 030/313] Update api.py --- packages/syft/src/syft/client/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 7d119b3d20b..4efdcbf6469 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -1101,7 +1101,7 @@ def build_endpoint_tree( endpoints: dict[str, LibEndpoint], communication_protocol: PROTOCOL_TYPE ) -> APIModule: api_module = APIModule(path="", refresh_callback=self.refresh_api_callback) - for _, v in endpoints.items(): + for v in endpoints.values(): signature = v.signature if not v.has_self: signature = signature_remove_self(signature) From 806836244f19ed2650cc5c7fb9bc3fbf258a367f Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 08:16:32 +0200 Subject: [PATCH 031/313] Update registry.py --- packages/syft/src/syft/client/registry.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/syft/src/syft/client/registry.py b/packages/syft/src/syft/client/registry.py index ee57b642f53..4128af452d8 100644 --- a/packages/syft/src/syft/client/registry.py +++ b/packages/syft/src/syft/client/registry.py @@ -405,10 +405,7 @@ def check_enclave(enclave: dict) -> dict[Any, Any] | None: executor.map(lambda enclave: check_enclave(enclave), enclaves) ) - online_enclaves = [] - for each in _online_enclaves: - if each is not None: - online_enclaves.append(each) + online_enclaves = [each for each in 
_online_enclaves if each is not None] return online_enclaves def _repr_html_(self) -> str: From 21b53f4956791801c3e092f9c2e9c8e48e600551 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 08:30:49 +0200 Subject: [PATCH 032/313] Update node.py --- packages/syft/src/syft/node/node.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 1e2c00c6f24..65a3ed80eda 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -619,7 +619,7 @@ def add_consumer_for_service( consumer.run() def remove_consumer_with_id(self, syft_worker_id: UID) -> None: - for _, consumers in self.queue_manager.consumers.items(): + for consumers in self.queue_manager.consumers.values(): # Grab the list of consumers for the given queue consumer_to_pop = None for consumer_idx, consumer in enumerate(consumers): @@ -834,8 +834,7 @@ def __repr__(self) -> str: service_string = "" if not self.is_subprocess: services = [] - for service in self.services: - services.append(service.__name__) + services = [service.__name__ for service in self.services] service_string = ", ".join(sorted(services)) service_string = f"\n\nServices:\n{service_string}" return f"{type(self).__name__}: {self.name} - {self.id} - {self.node_type}{service_string}" From b255bf463f60078555360055d4448eb0d892626e Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 08:51:50 +0200 Subject: [PATCH 033/313] Update k8s.py --- packages/syft/src/syft/custom_worker/k8s.py | 23 ++++++++++----------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/packages/syft/src/syft/custom_worker/k8s.py b/packages/syft/src/syft/custom_worker/k8s.py index cb4b5765e62..21eea90146e 100644 --- a/packages/syft/src/syft/custom_worker/k8s.py +++ b/packages/syft/src/syft/custom_worker/k8s.py @@ -120,14 +120,12 @@ def resolve_pod(client: kr8s.Api, pod: str | Pod) -> Pod | None: @staticmethod def get_logs(pods: list[Pod]) -> str: - """Combine and return logs for all the pods as string""" - logs = [] - for pod in pods: - logs.append(f"----------Logs for pod={pod.metadata.name}----------") - for log in pod.logs(): - logs.append(log) + """Combine and return logs for all the pods as a single string.""" + return "\n".join( + f"----------Logs for pod={pod.metadata.name}----------\n{''.join(pod.logs())}" + for pod in pods + ) - return "\n".join(logs) @staticmethod def get_pod_status(pod: Pod) -> PodStatus | None: @@ -150,11 +148,12 @@ def get_pod_env(pod: Pod) -> list[dict] | None: @staticmethod def get_container_exit_code(pods: list[Pod]) -> list[int]: """Return the exit codes of all the containers in the given pods.""" - exit_codes = [] - for pod in pods: - for container_status in pod.status.containerStatuses: - exit_codes.append(container_status.state.terminated.exitCode) - return exit_codes + return [ + container_status.state.terminated.exitCode + for pod in pods + for container_status in pod.status.containerStatuses + ] + @staticmethod def get_container_exit_message(pods: list[Pod]) -> str | None: From f153c6982d7e9b3beb1289786c98d10f2e039c77 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 08:54:12 +0200 Subject: [PATCH 034/313] Update k8s.py --- packages/syft/src/syft/custom_worker/k8s.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/syft/src/syft/custom_worker/k8s.py b/packages/syft/src/syft/custom_worker/k8s.py index 21eea90146e..7f76cb94337 100644 --- 
a/packages/syft/src/syft/custom_worker/k8s.py +++ b/packages/syft/src/syft/custom_worker/k8s.py @@ -126,7 +126,6 @@ def get_logs(pods: list[Pod]) -> str: for pod in pods ) - @staticmethod def get_pod_status(pod: Pod) -> PodStatus | None: """Map the status of the given pod to PodStatuss.""" @@ -154,7 +153,6 @@ def get_container_exit_code(pods: list[Pod]) -> list[int]: for container_status in pod.status.containerStatuses ] - @staticmethod def get_container_exit_message(pods: list[Pod]) -> str | None: """Return the exit message of the first container that exited with non-zero code.""" From 0c286511136fa820bec7617f9af85c355dfc741f Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 08:56:44 +0200 Subject: [PATCH 035/313] Update client.py --- packages/syft/src/syft/client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 498c22e7536..72a048d2d2b 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -589,7 +589,7 @@ def get_nested_codes(code: UserCode) -> list[UserCode]: if code.nested_codes is None: return result - for _, (linked_code_obj, _) in code.nested_codes.items(): + for linked_code_obj in code.nested_codes.keys(): nested_code = linked_code_obj.resolve nested_code = deepcopy(nested_code) nested_code.node_uid = code.node_uid From bc13c418cdc2cc52d46b8b8ae5713bd26f186bdc Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:02:38 +0200 Subject: [PATCH 036/313] Update server.py --- packages/syft/src/syft/node/server.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/packages/syft/src/syft/node/server.py b/packages/syft/src/syft/node/server.py index f5f05bf35ac..cf86da91d6a 100644 --- a/packages/syft/src/syft/node/server.py +++ b/packages/syft/src/syft/node/server.py @@ -285,14 +285,12 @@ def find_python_processes_on_port(port: int) -> list[int]: python_pids = [] for pid in pids: + if system == "Windows": + command = f"wmic process where (ProcessId='{pid}') get ProcessId,CommandLine" + else: + command = f"ps -p {pid} -o pid,command" + try: - if system == "Windows": - command = ( - f"wmic process where (ProcessId='{pid}') get ProcessId,CommandLine" - ) - else: - command = f"ps -p {pid} -o pid,command" - process = subprocess.Popen( # nosec command, shell=True, @@ -301,14 +299,14 @@ def find_python_processes_on_port(port: int) -> list[int]: text=True, ) output, _ = process.communicate() - lines = output.strip().split("\n") - - if len(lines) > 1 and "python" in lines[1].lower(): - python_pids.append(pid) - except Exception as e: print(f"Error checking process {pid}: {e}") - + continue + + lines = output.strip().split("\n") + if len(lines) > 1 and "python" in lines[1].lower(): + python_pids.append(pid) + return python_pids From 441e9266c0d7ce870c380979de79b442c56b79dd Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:05:20 +0200 Subject: [PATCH 037/313] Update server.py --- packages/syft/src/syft/node/server.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/node/server.py b/packages/syft/src/syft/node/server.py index cf86da91d6a..3da97e4b0a2 100644 --- a/packages/syft/src/syft/node/server.py +++ b/packages/syft/src/syft/node/server.py @@ -286,10 +286,12 @@ def find_python_processes_on_port(port: int) -> list[int]: python_pids = [] for pid in pids: if system == "Windows": - command = 
f"wmic process where (ProcessId='{pid}') get ProcessId,CommandLine" + command = ( + f"wmic process where (ProcessId='{pid}') get ProcessId,CommandLine" + ) else: command = f"ps -p {pid} -o pid,command" - + try: process = subprocess.Popen( # nosec command, @@ -302,11 +304,11 @@ def find_python_processes_on_port(port: int) -> list[int]: except Exception as e: print(f"Error checking process {pid}: {e}") continue - + lines = output.strip().split("\n") if len(lines) > 1 and "python" in lines[1].lower(): python_pids.append(pid) - + return python_pids From d96a4dcde1d870f52e05a406d46b9d19e1edb833 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:12:39 +0200 Subject: [PATCH 038/313] Update data_protocol.py --- packages/syft/src/syft/protocol/data_protocol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 79f0d680658..f357f867467 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -507,7 +507,7 @@ def calculate_supported_protocols(self) -> dict: # we assume its supported until we prove otherwise protocol_supported[v] = True # iterate through each object - for canonical_name, _ in version_data["object_versions"].items(): + for canonical_name in version_data["object_versions"].keys(): if canonical_name not in self.state: protocol_supported[v] = False break From cbb80e396ed038419a4f549308aef355eb61af34 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:25:17 +0200 Subject: [PATCH 039/313] Update recursive.py --- packages/syft/src/syft/serde/recursive.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 02957e5f23d..f009ea34299 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -311,7 +311,7 @@ def rs_proto2object(proto: _DynamicStructBuilder) -> Any: # relative from ..node.node import CODE_RELOADER - for _, load_user_code in CODE_RELOADER.items(): + for load_user_code in CODE_RELOADER.values(): load_user_code() try: class_type = getattr(sys.modules[".".join(module_parts)], klass) From 0717723931943faf688fae800e10b951b803c666 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:30:17 +0200 Subject: [PATCH 040/313] Update recursive_primitives.py --- packages/syft/src/syft/serde/recursive_primitives.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/syft/src/syft/serde/recursive_primitives.py b/packages/syft/src/syft/serde/recursive_primitives.py index cb90932247a..a536950b5b7 100644 --- a/packages/syft/src/syft/serde/recursive_primitives.py +++ b/packages/syft/src/syft/serde/recursive_primitives.py @@ -64,17 +64,14 @@ def serialize_iterable(iterable: Collection) -> bytes: def deserialize_iterable(iterable_type: type, blob: bytes) -> Collection: - # relative from .deserialize import _deserialize MAX_TRAVERSAL_LIMIT = 2**64 - 1 - values = [] with iterable_schema.from_bytes( blob, traversal_limit_in_words=MAX_TRAVERSAL_LIMIT ) as msg: - for element in msg.values: - values.append(_deserialize(combine_bytes(element), from_bytes=True)) + values = (_deserialize(combine_bytes(element), from_bytes=True) for element in msg.values) return iterable_type(values) From 6fe6d8bcd3201874b6704db7e3e8ba2a92da1b64 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 
2024 09:32:54 +0200 Subject: [PATCH 041/313] Update recursive_primitives.py --- packages/syft/src/syft/serde/recursive_primitives.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/serde/recursive_primitives.py b/packages/syft/src/syft/serde/recursive_primitives.py index a536950b5b7..5bc2720a353 100644 --- a/packages/syft/src/syft/serde/recursive_primitives.py +++ b/packages/syft/src/syft/serde/recursive_primitives.py @@ -64,6 +64,7 @@ def serialize_iterable(iterable: Collection) -> bytes: def deserialize_iterable(iterable_type: type, blob: bytes) -> Collection: + # relative from .deserialize import _deserialize MAX_TRAVERSAL_LIMIT = 2**64 - 1 @@ -71,7 +72,10 @@ def deserialize_iterable(iterable_type: type, blob: bytes) -> Collection: with iterable_schema.from_bytes( blob, traversal_limit_in_words=MAX_TRAVERSAL_LIMIT ) as msg: - values = (_deserialize(combine_bytes(element), from_bytes=True) for element in msg.values) + values = ( + _deserialize(combine_bytes(element), from_bytes=True) + for element in msg.values + ) return iterable_type(values) From ba697c34aa8eb4065a3f179575b0285c68db2b5b Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:36:29 +0200 Subject: [PATCH 042/313] Update action_object.py --- packages/syft/src/syft/service/action/action_object.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index dffa3d3d9de..a0377f3d229 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -1090,11 +1090,9 @@ def syft_make_action( if kwargs is None: kwargs = {} - arg_ids = [] - kwarg_ids = {} + arg_ids = [self._syft_prepare_obj_uid(obj) for obj in args] - for obj in args: - arg_ids.append(self._syft_prepare_obj_uid(obj)) + kwarg_ids = {} for k, obj in kwargs.items(): kwarg_ids[k] = self._syft_prepare_obj_uid(obj) From dab58914067f4712fc4f991b89d312e257f92998 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:38:16 +0200 Subject: [PATCH 043/313] Update action_object.py --- packages/syft/src/syft/service/action/action_object.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index a0377f3d229..15ef93096c5 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -2170,7 +2170,7 @@ def has_action_data_empty(args: Any, kwargs: Any) -> bool: if is_action_data_empty(a): return True - for _, a in kwargs.items(): + for a in kwargs.values(): if is_action_data_empty(a): return True return False From 0bb036c25382aba4c1f8a39111fd2a38e8f3e230 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:41:51 +0200 Subject: [PATCH 044/313] Update api_service.py --- packages/syft/src/syft/service/api/api_service.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/syft/src/syft/service/api/api_service.py b/packages/syft/src/syft/service/api/api_service.py index 55d2df05bf0..5ac2fa02dad 100644 --- a/packages/syft/src/syft/service/api/api_service.py +++ b/packages/syft/src/syft/service/api/api_service.py @@ -238,11 +238,7 @@ def api_endpoints( return SyftError(message=result.err()) all_api_endpoints = result.ok() - api_endpoint_view = [] - for api_endpoint in 
all_api_endpoints: - api_endpoint_view.append( - api_endpoint.to(TwinAPIEndpointView, context=context) - ) + api_endpoint_view = [api_endpoint.to(TwinAPIEndpointView, context=context) for api_endpoint in all_api_endpoints] return api_endpoint_view From e6457801517931f46f15ec30e49c365e315f2d4e Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:43:06 +0200 Subject: [PATCH 045/313] Update api_service.py --- packages/syft/src/syft/service/api/api_service.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/api/api_service.py b/packages/syft/src/syft/service/api/api_service.py index 5ac2fa02dad..6c6b4da2cde 100644 --- a/packages/syft/src/syft/service/api/api_service.py +++ b/packages/syft/src/syft/service/api/api_service.py @@ -238,7 +238,10 @@ def api_endpoints( return SyftError(message=result.err()) all_api_endpoints = result.ok() - api_endpoint_view = [api_endpoint.to(TwinAPIEndpointView, context=context) for api_endpoint in all_api_endpoints] + api_endpoint_view = [ + api_endpoint.to(TwinAPIEndpointView, context=context) + for api_endpoint in all_api_endpoints + ] return api_endpoint_view From f08f428a0bb6034b4446736fdd2f631d7e157b9c Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:44:13 +0200 Subject: [PATCH 046/313] Update api_service.py --- packages/syft/src/syft/service/api/api_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/api/api_service.py b/packages/syft/src/syft/service/api/api_service.py index 6c6b4da2cde..4084814001c 100644 --- a/packages/syft/src/syft/service/api/api_service.py +++ b/packages/syft/src/syft/service/api/api_service.py @@ -239,7 +239,7 @@ def api_endpoints( all_api_endpoints = result.ok() api_endpoint_view = [ - api_endpoint.to(TwinAPIEndpointView, context=context) + api_endpoint.to(TwinAPIEndpointView, context=context) for api_endpoint in all_api_endpoints ] From 2c692e163faf9c1392f9bd1d028aa59b6a2bdedc Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:55:02 +0200 Subject: [PATCH 047/313] Update user_code.py --- packages/syft/src/syft/service/code/user_code.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 2cbeaf31967..832379fb869 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -442,7 +442,7 @@ def _get_input_policy(self) -> InputPolicy | None: ): # TODO: Tech Debt here node_view_workaround = False - for k, _ in self.input_policy_init_kwargs.items(): + for k in self.input_policy_init_kwargs.keys(): if isinstance(k, NodeIdentity): node_view_workaround = True @@ -727,7 +727,7 @@ def _inner_repr(self, level: int = 0) -> str: [f"{' '*level}{substring}" for substring in md.split("\n")[:-1]] ) if self.nested_codes is not None: - for _, (obj, _) in self.nested_codes.items(): + for obj, _ in self.nested_codes.values(): code = obj.resolve md += "\n" md += code._inner_repr(level=level + 1) @@ -876,7 +876,7 @@ def _ephemeral_node_call( # And need only ActionObjects # Also, this works only on the assumption that all inputs # are ActionObjects, which might change in the future - for _, id in obj_dict.items(): + for id in obj_dict.values(): mock_obj = api.services.action.get_mock(id) if isinstance(mock_obj, SyftError): data_obj = api.services.action.get(id) From 
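
Several of the hunks above replace "for k, _ in d.items()" loops and append loops with the forms ruff's C4 and PERF rules favor. A small self-contained illustration of the substitutions these patches keep making:

    inputs = {"a": 1, "b": 2}

    # Keys only: iterate the dict directly (or call list on it) instead of
    # unpacking .items() pairs and discarding the value.
    names = list(inputs)

    # Values only: .values() instead of discarding the key.
    total = sum(inputs.values())

    # Append loops collapse into comprehensions.
    squares = [v * v for v in inputs.values()]

    print(names, total, squares)  # ['a', 'b'] 3 [1, 4]
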
32b43516453753131da5497cfe71c87adfebb663 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 09:58:07 +0200 Subject: [PATCH 048/313] Update code_history.py --- packages/syft/src/syft/service/code_history/code_history.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/code_history/code_history.py b/packages/syft/src/syft/service/code_history/code_history.py index 139fd9e2d7a..34dc7174aa2 100644 --- a/packages/syft/src/syft/service/code_history/code_history.py +++ b/packages/syft/src/syft/service/code_history/code_history.py @@ -137,7 +137,5 @@ def __getitem__(self, key: str | int) -> CodeHistoriesDict | SyftError: return api.services.code_history.get_history_for_user(key) def _repr_html_(self) -> str: - rows = [] - for user, funcs in self.user_dict.items(): - rows += [{"user": user, "UserCodes": funcs}] + rows = [{"user": user, "UserCodes": funcs} for user, funcs in self.user_dict.items()] return create_table_template(rows, "UserCodeHistory", icon=None) From 011b46903ae99472fae575d0681307b6b9b6e6b6 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 10:00:35 +0200 Subject: [PATCH 049/313] Update dataset.py --- packages/syft/src/syft/service/dataset/dataset.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/dataset/dataset.py b/packages/syft/src/syft/service/dataset/dataset.py index 687a2eb5f84..8bbcf83cf2d 100644 --- a/packages/syft/src/syft/service/dataset/dataset.py +++ b/packages/syft/src/syft/service/dataset/dataset.py @@ -484,10 +484,7 @@ def _repr_html_(self) -> Any: """ def action_ids(self) -> list[UID]: - data = [] - for asset in self.asset_list: - if asset.action_id: - data.append(asset.action_id) + data = [asset.action_id for asset in self.asset_list if asset.action_id] return data @property From d9cf2da64a44060632caec3b8dd24195c4ca378d Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 10:03:43 +0200 Subject: [PATCH 050/313] Update dataset_service.py --- .../syft/service/dataset/dataset_service.py | 33 ++++++++----------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/packages/syft/src/syft/service/dataset/dataset_service.py b/packages/syft/src/syft/service/dataset/dataset_service.py index f791d6b9dc2..7e9e6b30758 100644 --- a/packages/syft/src/syft/service/dataset/dataset_service.py +++ b/packages/syft/src/syft/service/dataset/dataset_service.py @@ -177,26 +177,19 @@ def get_by_action_id( return datasets return SyftError(message=result.err()) - @service_method( - path="dataset.get_assets_by_action_id", - name="get_assets_by_action_id", - roles=DATA_SCIENTIST_ROLE_LEVEL, - ) - def get_assets_by_action_id( - self, context: AuthedServiceContext, uid: UID - ) -> list[Asset] | SyftError: - """Get Assets by an Action ID""" - datasets = self.get_by_action_id(context=context, uid=uid) - assets = [] - if isinstance(datasets, list): - for dataset in datasets: - for asset in dataset.asset_list: - if asset.action_id == uid: - assets.append(asset) - return assets - elif isinstance(datasets, SyftError): - return datasets - return [] +@service_method( + path="dataset.get_assets_by_action_id", + name="get_assets_by_action_id", + roles=DATA_SCIENTIST_ROLE_LEVEL, +) +def get_assets_by_action_id( + self, context: AuthedServiceContext, uid: UID +) -> list[Asset] | SyftError: + """Get Assets by an Action ID""" + datasets = self.get_by_action_id(context=context, uid=uid) + if isinstance(datasets, SyftError): + return datasets + 
return [asset for dataset in datasets for asset in dataset.asset_list if asset.action_id == uid] @service_method( path="dataset.delete_by_uid", From dc81317fd93c45e870f08f7fa462f9531246b6b5 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 10:07:07 +0200 Subject: [PATCH 051/313] Update dataset_service.py --- packages/syft/src/syft/service/dataset/dataset_service.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/dataset/dataset_service.py b/packages/syft/src/syft/service/dataset/dataset_service.py index 7e9e6b30758..3a731d7fe9f 100644 --- a/packages/syft/src/syft/service/dataset/dataset_service.py +++ b/packages/syft/src/syft/service/dataset/dataset_service.py @@ -177,6 +177,7 @@ def get_by_action_id( return datasets return SyftError(message=result.err()) + @service_method( path="dataset.get_assets_by_action_id", name="get_assets_by_action_id", @@ -189,7 +190,12 @@ def get_assets_by_action_id( datasets = self.get_by_action_id(context=context, uid=uid) if isinstance(datasets, SyftError): return datasets - return [asset for dataset in datasets for asset in dataset.asset_list if asset.action_id == uid] + return [ + asset + for dataset in datasets + for asset in dataset.asset_list + if asset.action_id == uid + ] @service_method( path="dataset.delete_by_uid", From 728addc01ccdfe18c8b3687d942d45b795d847af Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 10:07:53 +0200 Subject: [PATCH 052/313] Update code_history.py --- packages/syft/src/syft/service/code_history/code_history.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/code_history/code_history.py b/packages/syft/src/syft/service/code_history/code_history.py index 34dc7174aa2..b5e893c87bf 100644 --- a/packages/syft/src/syft/service/code_history/code_history.py +++ b/packages/syft/src/syft/service/code_history/code_history.py @@ -137,5 +137,7 @@ def __getitem__(self, key: str | int) -> CodeHistoriesDict | SyftError: return api.services.code_history.get_history_for_user(key) def _repr_html_(self) -> str: - rows = [{"user": user, "UserCodes": funcs} for user, funcs in self.user_dict.items()] + rows = [ + {"user": user, "UserCodes": funcs} for user, funcs in self.user_dict.items() + ] return create_table_template(rows, "UserCodeHistory", icon=None) From d2e85861106bc388005e7ca03f181c1e8c4d69aa Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 10:13:07 +0200 Subject: [PATCH 053/313] Update dataset_service.py --- .../syft/service/dataset/dataset_service.py | 37 +++++++++---------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/packages/syft/src/syft/service/dataset/dataset_service.py b/packages/syft/src/syft/service/dataset/dataset_service.py index 3a731d7fe9f..451746fa15a 100644 --- a/packages/syft/src/syft/service/dataset/dataset_service.py +++ b/packages/syft/src/syft/service/dataset/dataset_service.py @@ -177,25 +177,24 @@ def get_by_action_id( return datasets return SyftError(message=result.err()) - -@service_method( - path="dataset.get_assets_by_action_id", - name="get_assets_by_action_id", - roles=DATA_SCIENTIST_ROLE_LEVEL, -) -def get_assets_by_action_id( - self, context: AuthedServiceContext, uid: UID -) -> list[Asset] | SyftError: - """Get Assets by an Action ID""" - datasets = self.get_by_action_id(context=context, uid=uid) - if isinstance(datasets, SyftError): - return datasets - return [ - asset - for dataset in datasets - for asset in 
dataset.asset_list - if asset.action_id == uid - ] + @service_method( + path="dataset.get_assets_by_action_id", + name="get_assets_by_action_id", + roles=DATA_SCIENTIST_ROLE_LEVEL, + ) + def get_assets_by_action_id( + self, context: AuthedServiceContext, uid: UID + ) -> list[Asset] | SyftError: + """Get Assets by an Action ID""" + datasets = self.get_by_action_id(context=context, uid=uid) + if isinstance(datasets, SyftError): + return datasets + return [ + asset + for dataset in datasets + for asset in dataset.asset_list + if asset.action_id == uid + ] @service_method( path="dataset.delete_by_uid", From c27cbc020ff3b446fd6af5295232054440f1edee Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 10:20:13 +0200 Subject: [PATCH 054/313] Update network_service.py --- .../syft/src/syft/service/network/network_service.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/syft/src/syft/service/network/network_service.py b/packages/syft/src/syft/service/network/network_service.py index b38d822c7f4..874baf28e30 100644 --- a/packages/syft/src/syft/service/network/network_service.py +++ b/packages/syft/src/syft/service/network/network_service.py @@ -900,14 +900,10 @@ def _get_association_requests_by_peer_id( RequestService.get_all ) all_requests: list[Request] = request_get_all_method(context) - association_requests: list[Request] = [] - for request in all_requests: - for change in request.changes: - if ( - isinstance(change, AssociationRequestChange) - and change.remote_peer.id == peer_id - ): - association_requests.append(request) + association_requests: list[Request] = [ + request for request in all_requests + if any(isinstance(change, AssociationRequestChange) and change.remote_peer.id == peer_id for change in request.changes) + ] return sorted( association_requests, key=lambda request: request.request_time.utc_timestamp From 1d36ede91f993228a5963578f9591d4e4c70016f Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 10:21:54 +0200 Subject: [PATCH 055/313] Update network_service.py --- .../syft/src/syft/service/network/network_service.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/network/network_service.py b/packages/syft/src/syft/service/network/network_service.py index 874baf28e30..ac329420168 100644 --- a/packages/syft/src/syft/service/network/network_service.py +++ b/packages/syft/src/syft/service/network/network_service.py @@ -901,8 +901,13 @@ def _get_association_requests_by_peer_id( ) all_requests: list[Request] = request_get_all_method(context) association_requests: list[Request] = [ - request for request in all_requests - if any(isinstance(change, AssociationRequestChange) and change.remote_peer.id == peer_id for change in request.changes) + request + for request in all_requests + if any( + isinstance(change, AssociationRequestChange) + and change.remote_peer.id == peer_id + for change in request.changes + ) ] return sorted( From d85d743c65ef499296058ae6f3c11f20159bf86e Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 15:55:48 +0200 Subject: [PATCH 056/313] Update policy.py --- .../syft/src/syft/service/policy/policy.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index 5bf5739d6fc..736bf85407c 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ 
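
The policy.py hunk below folds a loop that appended one ast.ImportFrom node per typing name into a single node carrying all the aliases. A standalone sketch of that consolidation (requires Python 3.9+ for ast.unparse; the name list is shortened here):

    import ast

    typing_types = ["Any", "Dict", "List"]

    # One ImportFrom node with many aliases instead of many single-alias nodes.
    node = ast.ImportFrom(
        module="typing",
        names=[ast.alias(name=t, asname=t) for t in typing_types],
        level=0,
    )
    module = ast.Module(body=[node], type_ignores=[])
    ast.fix_missing_locations(module)
    print(ast.unparse(module))
    # from typing import Any as Any, Dict as Dict, List as List
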
b/packages/syft/src/syft/service/policy/policy.py @@ -72,9 +72,7 @@ def filter_only_uids(results: Any) -> list[UID] | dict[str, UID] | UID: results = [results] if isinstance(results, list): - output_list = [] - for v in results: - output_list.append(extract_uid(v)) + output_list = [extract_uid(v) for v in results] return output_list elif isinstance(results, dict): output_dict = {} @@ -712,14 +710,16 @@ def process_class_code(raw_code: str, class_name: str) -> str: "Tuple", "Type", ] - for typing_type in typing_types: - new_body.append( - ast.ImportFrom( - module="typing", - names=[ast.alias(name=typing_type, asname=typing_type)], - level=0, - ) + new_body.append( + ast.ImportFrom( + module="typing", + names=[ + ast.alias(name=typing_type, asname=typing_type) + for typing_type in typing_types + ], + level=0, ) + ) new_body.append(new_class) module = ast.Module(new_body, type_ignores=[]) try: From 69ef9ed92df10d476a2c69d1e785091158ed4718 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 15:59:15 +0200 Subject: [PATCH 057/313] Update project.py --- packages/syft/src/syft/service/project/project.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/service/project/project.py b/packages/syft/src/syft/service/project/project.py index 981f7ff9192..3af678746de 100644 --- a/packages/syft/src/syft/service/project/project.py +++ b/packages/syft/src/syft/service/project/project.py @@ -939,12 +939,11 @@ def create_code_request( reason=reason, ) - def get_messages(self) -> list[ProjectMessage | ProjectThreadMessage]: - messages = [] - for event in self.events: - if isinstance(event, ProjectMessage | ProjectThreadMessage): - messages.append(event) - return messages + def get_messages(self) -> list[ProjectMessage, ProjectThreadMessage]: + return [event + for event in self.events + if isinstance(event, (ProjectMessage, ProjectThreadMessage)) + ] @property def messages(self) -> str: From 50717ee46bd499650821a2fbc6ccc5ae11bca71b Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:01:16 +0200 Subject: [PATCH 058/313] Update project.py --- packages/syft/src/syft/service/project/project.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/project/project.py b/packages/syft/src/syft/service/project/project.py index 3af678746de..d76a45c4250 100644 --- a/packages/syft/src/syft/service/project/project.py +++ b/packages/syft/src/syft/service/project/project.py @@ -940,10 +940,11 @@ def create_code_request( ) def get_messages(self) -> list[ProjectMessage, ProjectThreadMessage]: - return [event - for event in self.events - if isinstance(event, (ProjectMessage, ProjectThreadMessage)) - ] + return [ + event + for event in self.events + if isinstance(event, (ProjectMessage, ProjectThreadMessage)) + ] @property def messages(self) -> str: From fd4800e6d158d9f725a72aba9113de3a677ec8f4 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:03:07 +0200 Subject: [PATCH 059/313] Update project.py --- packages/syft/src/syft/service/project/project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/project/project.py b/packages/syft/src/syft/service/project/project.py index d76a45c4250..3bdbc086d39 100644 --- a/packages/syft/src/syft/service/project/project.py +++ b/packages/syft/src/syft/service/project/project.py @@ -942,7 +942,7 @@ def create_code_request( def get_messages(self) -> list[ProjectMessage, 
ProjectThreadMessage]: return [ event - for event in self.events + for event in self.events if isinstance(event, (ProjectMessage, ProjectThreadMessage)) ] From e4c20397346e6b0baf9db1694c923501d44f60cb Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:06:47 +0200 Subject: [PATCH 060/313] Update project.py --- packages/syft/src/syft/service/project/project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/project/project.py b/packages/syft/src/syft/service/project/project.py index 3bdbc086d39..3eba53ab9a1 100644 --- a/packages/syft/src/syft/service/project/project.py +++ b/packages/syft/src/syft/service/project/project.py @@ -939,7 +939,7 @@ def create_code_request( reason=reason, ) - def get_messages(self) -> list[ProjectMessage, ProjectThreadMessage]: + def get_messages(self) -> list[ProjectMessage | ProjectThreadMessage]: return [ event for event in self.events From ba3f0ad383ef55dc705daaec23b929f841f19278 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:25:41 +0200 Subject: [PATCH 061/313] Update zmq_queue.py --- .../syft/src/syft/service/queue/zmq_queue.py | 37 +++++++++++++++++-- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 3ad4b732f89..ea53a50b73a 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -300,8 +300,7 @@ def read_items(self) -> None: continue for arg in action.args: self.preprocess_action_arg(arg) - for _, arg in action.kwargs.items(): - self.preprocess_action_arg(arg) + [self.preprocess_action_arg(arg) for _, arg in action.kwargs.items()] msg_bytes = serialize(item, to_bytes=True) worker_pool = item.worker_pool.resolve_with_context( @@ -476,8 +475,38 @@ def _run(self) -> None: if self._stop.is_set(): return - for _, service in self.services.items(): - self.dispatch(service, None) + def _run(self) -> None: + while True: + if self._stop.is_set(): + return + + for service in self.services.keys(): + thread = threading.Thread(target=self.dispatch, args=(service, None)) + thread.start() + + items = None + + try: + items = self.poll_workers.poll(ZMQ_POLLER_TIMEOUT_MSEC) + except Exception as e: + logger.exception("Failed to poll items: {}", e) + + if items: + msg = self.socket.recv_multipart() + + logger.debug("Recieve: {}", msg) + + address = msg.pop(0) + empty = msg.pop(0) # noqa: F841 + header = msg.pop(0) + + if header == QueueMsgProtocol.W_WORKER: + self.process_worker(address, msg) + else: + logger.error("Invalid message header: {}", header) + + self.send_heartbeats() + self.purge_workers() items = None From 7dd349e36dc20878543492a274d17022e8e1194f Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:28:43 +0200 Subject: [PATCH 062/313] Update zmq_queue.py --- packages/syft/src/syft/service/queue/zmq_queue.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index ea53a50b73a..f1e3fbd8f84 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -957,12 +957,12 @@ def send_message( def close(self) -> SyftError | SyftSuccess: try: - for _, consumers in self.consumers.items(): + for consumers in self.consumers.values(): for consumer in consumers: # make sure look is stopped 
consumer.close() - for _, producer in self.producers.items(): + for producer in self.producers.values(): # make sure loop is stopped producer.close() # close existing connection. From 2cde63933aee3c8b0d0001e0da14d4e2f9417e64 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:36:15 +0200 Subject: [PATCH 063/313] Update request.py --- packages/syft/src/syft/service/request/request.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 2a8ae61bf60..0a09085d90f 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -1250,7 +1250,7 @@ def code(self) -> UserCode: def codes(self) -> list[UserCode]: def recursive_code(node: Any) -> list: codes = [] - for _, (obj, new_node) in node.items(): + for (obj, new_node) in node.values(): codes.append(obj.resolve) codes.extend(recursive_code(new_node)) return codes From d0e3e64cedfa8d2074ee54fc3644dc5c10b02573 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:38:08 +0200 Subject: [PATCH 064/313] Update user_roles.py --- packages/syft/src/syft/service/user/user_roles.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/user/user_roles.py b/packages/syft/src/syft/service/user/user_roles.py index 6ed7f4a9796..34ab6d1ede6 100644 --- a/packages/syft/src/syft/service/user/user_roles.py +++ b/packages/syft/src/syft/service/user/user_roles.py @@ -34,9 +34,7 @@ class ServiceRole(Enum): # @property @classmethod def roles_descending(cls) -> list[tuple[int, Self]]: - tuples = [] - for x in cls: - tuples.append((x.value, x)) + tuples = [(x.value, x) for x in cls] return sorted(tuples, reverse=True) @classmethod From d064c9cb02f7b0ff208c2cd35b2d81ea2959a92e Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:41:20 +0200 Subject: [PATCH 065/313] Update worker_pool.py --- .../syft/src/syft/service/worker/worker_pool.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/service/worker/worker_pool.py b/packages/syft/src/syft/service/worker/worker_pool.py index 24325ed2995..239ff6fde0a 100644 --- a/packages/syft/src/syft/service/worker/worker_pool.py +++ b/packages/syft/src/syft/service/worker/worker_pool.py @@ -180,10 +180,11 @@ def image(self) -> SyftWorkerImage | SyftError | None: @property def running_workers(self) -> list[SyftWorker] | SyftError: """Query the running workers using an API call to the server""" - _running_workers = [] - for worker in self.workers: - if worker.status == WorkerStatus.RUNNING: - _running_workers.append(worker) + _running_workers = [ + worker + for worker in self.workers + if worker.status == WorkerStatus.RUNNING + ] return _running_workers @@ -194,9 +195,11 @@ def healthy_workers(self) -> list[SyftWorker] | SyftError: """ _healthy_workers = [] - for worker in self.workers: - if worker.healthcheck == WorkerHealth.HEALTHY: - _healthy_workers.append(worker) + _healthy_workers = [ + worker + for worker in self.workers + if worker.healthcheck == WorkerHealth.HEALTHY + ] return _healthy_workers From f385c9377b8962349ed10b2aced7af91391be047 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:42:52 +0200 Subject: [PATCH 066/313] Update request.py --- packages/syft/src/syft/service/request/request.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
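
The recursive_code hunks just above and below walk a nested mapping whose values are (linked object, children) pairs, collecting every object depth-first. The same traversal with plain dicts, as a sketch (the pair-of-object-and-subtree layout is inferred from the surrounding diffs):

    def collect(node: dict) -> list:
        """Flatten a nested {name: (obj, children)} mapping depth-first."""
        objs = []
        for obj, children in node.values():
            objs.append(obj)
            objs.extend(collect(children))
        return objs

    tree = {"f": ("code-f", {"g": ("code-g", {})}), "h": ("code-h", {})}
    print(collect(tree))  # ['code-f', 'code-g', 'code-h']
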
a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 0a09085d90f..1a5ab9ff895 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -1250,7 +1250,7 @@ def code(self) -> UserCode: def codes(self) -> list[UserCode]: def recursive_code(node: Any) -> list: codes = [] - for (obj, new_node) in node.values(): + for obj, new_node in node.values(): codes.append(obj.resolve) codes.extend(recursive_code(new_node)) return codes From 452124cc3f14bdce72b882be80cc9c6100bce9a0 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:44:38 +0200 Subject: [PATCH 067/313] Update worker_pool.py --- packages/syft/src/syft/service/worker/worker_pool.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/worker/worker_pool.py b/packages/syft/src/syft/service/worker/worker_pool.py index 239ff6fde0a..b3ad441d0f6 100644 --- a/packages/syft/src/syft/service/worker/worker_pool.py +++ b/packages/syft/src/syft/service/worker/worker_pool.py @@ -181,9 +181,7 @@ def image(self) -> SyftWorkerImage | SyftError | None: def running_workers(self) -> list[SyftWorker] | SyftError: """Query the running workers using an API call to the server""" _running_workers = [ - worker - for worker in self.workers - if worker.status == WorkerStatus.RUNNING + worker for worker in self.workers if worker.status == WorkerStatus.RUNNING ] return _running_workers From 502a53677890225ae2d04ce02fe601119a700046 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:46:03 +0200 Subject: [PATCH 068/313] Update project.py --- packages/syft/src/syft/service/project/project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/project/project.py b/packages/syft/src/syft/service/project/project.py index 3eba53ab9a1..e768154ed00 100644 --- a/packages/syft/src/syft/service/project/project.py +++ b/packages/syft/src/syft/service/project/project.py @@ -943,7 +943,7 @@ def get_messages(self) -> list[ProjectMessage | ProjectThreadMessage]: return [ event for event in self.events - if isinstance(event, (ProjectMessage, ProjectThreadMessage)) + if isinstance(event, (ProjectMessage | ProjectThreadMessage)) ] @property From 501c472467bfd8d629bc4fee88f484a859a5b8ca Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:47:49 +0200 Subject: [PATCH 069/313] Update zmq_queue.py --- packages/syft/src/syft/service/queue/zmq_queue.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index f1e3fbd8f84..e60a7d5a942 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -475,11 +475,6 @@ def _run(self) -> None: if self._stop.is_set(): return - def _run(self) -> None: - while True: - if self._stop.is_set(): - return - for service in self.services.keys(): thread = threading.Thread(target=self.dispatch, args=(service, None)) thread.start() From 498eb2b58cae53796f4a241b1691ac6d97afe777 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:49:45 +0200 Subject: [PATCH 070/313] Update zmq_queue.py --- .../syft/src/syft/service/queue/zmq_queue.py | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 
e60a7d5a942..6b6f7cdda75 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -503,30 +503,6 @@ def _run(self) -> None: self.send_heartbeats() self.purge_workers() - items = None - - try: - items = self.poll_workers.poll(ZMQ_POLLER_TIMEOUT_MSEC) - except Exception as e: - logger.exception("Failed to poll items: {}", e) - - if items: - msg = self.socket.recv_multipart() - - logger.debug("Recieve: {}", msg) - - address = msg.pop(0) - empty = msg.pop(0) # noqa: F841 - header = msg.pop(0) - - if header == QueueMsgProtocol.W_WORKER: - self.process_worker(address, msg) - else: - logger.error("Invalid message header: {}", header) - - self.send_heartbeats() - self.purge_workers() - def require_worker(self, address: bytes) -> Worker: """Finds the worker (creates if necessary).""" identity = hexlify(address) From 2049f5260c2aa4cddc9a4e17aba1e01b2e39ceda Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:52:48 +0200 Subject: [PATCH 071/313] Update worker_pool_service.py --- packages/syft/src/syft/service/worker/worker_pool_service.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index 9e7d02572c1..998eefb8462 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -362,9 +362,7 @@ def get_all( return SyftError(message=f"{result.err()}") worker_pools: list[WorkerPool] = result.ok() - res: list[tuple] = [] - for pool in worker_pools: - res.append((pool.name, pool)) + res = [(pool.name, pool) for pool in worker_pools] return DictTuple(res) @service_method( From 427e67a7f3a0b7414cabf9b51eabbd8c0cab5c02 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:56:02 +0200 Subject: [PATCH 072/313] Update seaweedfs.py --- .../src/syft/store/blob_storage/seaweedfs.py | 29 ++++++++++--------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/seaweedfs.py b/packages/syft/src/syft/store/blob_storage/seaweedfs.py index 1d88fedda37..930c99ce8a7 100644 --- a/packages/syft/src/syft/store/blob_storage/seaweedfs.py +++ b/packages/syft/src/syft/store/blob_storage/seaweedfs.py @@ -115,23 +115,26 @@ def async_generator( def add_chunks_to_queue( self, + data: IO[bytes], + part_size: int, queue: Queue, chunk_size: int = DEFAULT_UPLOAD_CHUNK_SIZE, ) -> None: - """Creates a data geneator for the part""" + """Creates a data generator for the part""" n = 0 - - while n * chunk_size <= part_size: - try: - chunk = data.read(chunk_size) - self.no_lines += chunk.count(b"\n") - n += 1 - queue.put(chunk) - except BlockingIOError: - # if end of file, stop - queue.put(0) - # if end of part, stop - queue.put(0) + + while True: + if n * chunk_size >= part_size: + break + chunk = data.read(chunk_size) + if not chunk: + break + self.no_lines += chunk.count(b"\n") + n += 1 + queue.put(chunk) + + # Use None to indicate the end of the part or file + queue.put(None) gen = PartGenerator() From 1c7b030b78333124f76c1318d5e4c4937f76f213 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 16:59:51 +0200 Subject: [PATCH 073/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py 
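
The seaweedfs change above has the uploader read one part of a stream in fixed-size chunks, push each chunk onto a queue, and close the part with a sentinel so the consumer knows when to stop. A self-contained sketch of that producer/consumer handshake (sizes are toy values; the real code streams parts to SeaweedFS):

    from io import BytesIO
    from queue import Queue

    def enqueue_chunks(data, part_size: int, q: Queue, chunk_size: int = 4) -> None:
        read = 0
        while read < part_size:
            chunk = data.read(min(chunk_size, part_size - read))
            if not chunk:  # end of stream before the part is full
                break
            read += len(chunk)
            q.put(chunk)
        q.put(None)  # sentinel: this part is complete

    q: Queue = Queue()
    enqueue_chunks(BytesIO(b"abcdefghij"), part_size=10, q=q)
    for chunk in iter(q.get, None):  # drain until the sentinel
        print(chunk)  # b'abcd', b'efgh', b'ij'
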
b/packages/syft/src/syft/store/blob_storage/__init__.py index 7b0f2330875..df736a28b70 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -139,11 +139,12 @@ def syft_iter_content( except requests.exceptions.RequestException as e: if attempt < max_retries: - print( + logging.error( f"Attempt {attempt}/{max_retries} failed: {e} at byte {current_byte}. Retrying..." ) + time.sleep(2 ** attempt) # exponential backoff else: - print(f"Max retries reached. Failed with error: {e}") + logging.error(f"Max retries reached. Failed with error: {e}") raise From 3b7dbaa628a1aff56f83d32192306597cf5f9881 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:01:55 +0200 Subject: [PATCH 074/313] Update dict_document_store.py --- packages/syft/src/syft/store/dict_document_store.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/store/dict_document_store.py b/packages/syft/src/syft/store/dict_document_store.py index d0f6d9cb51f..71f0eced1f1 100644 --- a/packages/syft/src/syft/store/dict_document_store.py +++ b/packages/syft/src/syft/store/dict_document_store.py @@ -80,7 +80,7 @@ def __init__( ) def reset(self) -> None: - for _, partition in self.partitions.items(): + for partition in self.partitions.values(): partition.prune() From 46cec0c32f5f7924e85cadfa5c88a92b35478d9c Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:05:10 +0200 Subject: [PATCH 075/313] Update mongo_document_store.py --- packages/syft/src/syft/store/mongo_document_store.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py index 59d6799c2bb..68c00957bd9 100644 --- a/packages/syft/src/syft/store/mongo_document_store.py +++ b/packages/syft/src/syft/store/mongo_document_store.py @@ -422,12 +422,11 @@ def _get_all_from_store( syft_objs.append(obj.to(self.settings.object_type, transform_context)) # TODO: maybe do this in loop before this - res = [] - for s in syft_objs: - if has_permission or self.has_permission( - ActionObjectREAD(uid=s.id, credentials=credentials) - ): - res.append(s) + res = [ + s + for s in syft_objs + if has_permission or self.has_permission(ActionObjectREAD(uid=s.id, credentials=credentials)) + ] return Ok(res) def _delete( From 786d9dd04fcd85cf110abb60afffc6c1e2bd632d Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:07:35 +0200 Subject: [PATCH 076/313] Update sqlite_document_store.py --- packages/syft/src/syft/store/sqlite_document_store.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 8ef1b2803a8..96a0b70b81f 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -266,7 +266,6 @@ def _get_all(self) -> Any: def _get_all_keys(self) -> Any: select_sql = f"select uid from {self.table_name} order by sqltime" # nosec - keys = [] res = self._execute(select_sql) if res.is_err(): @@ -277,8 +276,7 @@ def _get_all_keys(self) -> Any: if rows is None: return [] - for row in rows: - keys.append(UID(row[0])) + keys = [UID(row[0]) for row in rows] return keys def _delete(self, key: UID) -> None: From d03914fda58a0cf19fa50da5b876d41ceb6e3c88 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 
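
The mongo_document_store hunk above keeps only the objects the caller is allowed to read, short-circuiting when the caller already holds a blanket permission. The same guard reduced to a sketch, with the ACL simplified to a set of readable ids:

    def readable(objs: list[dict], allowed_ids: set, is_admin: bool = False) -> list[dict]:
        # Blanket permission skips the per-object check entirely.
        return [o for o in objs if is_admin or o["id"] in allowed_ids]

    objs = [{"id": 1}, {"id": 2}, {"id": 3}]
    print(readable(objs, allowed_ids={2}))                   # [{'id': 2}]
    print(readable(objs, allowed_ids=set(), is_admin=True))  # everything
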
17:08:08 +0200 Subject: [PATCH 077/313] Update schema.py --- packages/syft/src/syft/util/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/util/schema.py b/packages/syft/src/syft/util/schema.py index f918ed0d4af..cc0958eddda 100644 --- a/packages/syft/src/syft/util/schema.py +++ b/packages/syft/src/syft/util/schema.py @@ -179,7 +179,7 @@ def process_type_bank(type_bank: dict[str, tuple[Any, ...]]) -> dict[str, dict]: def resolve_references(json_mappings: dict[str, dict]) -> dict[str, dict]: # track second pass generated types new_types = {} - for _, json_schema in json_mappings.items(): + for json_schema in json_mappings.values(): replace_types = {} for attribute, config in json_schema["properties"].items(): if "type" in config: From dd46b16238caa133907e6a9a329a1bfe94b5472b Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:10:40 +0200 Subject: [PATCH 078/313] Update dicttuple_test.py --- packages/syft/tests/syft/types/dicttuple_test.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/syft/tests/syft/types/dicttuple_test.py b/packages/syft/tests/syft/types/dicttuple_test.py index de32f2545bc..1a0405b406f 100644 --- a/packages/syft/tests/syft/types/dicttuple_test.py +++ b/packages/syft/tests/syft/types/dicttuple_test.py @@ -39,9 +39,7 @@ def test_dict_tuple_not_subclassing_mapping(): @pytest.mark.parametrize("dict_tuple", SIMPLE_TEST_CASES) def test_should_iter_over_value(dict_tuple: DictTuple) -> None: - values = [] - for v in dict_tuple: - values.append(v) + values = [v for v in dict_tuple] assert values == [1, 2] From f3f80234ffdba97275da60e3d09bfb3f4076ad36 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:11:36 +0200 Subject: [PATCH 079/313] Update user_service_test.py --- packages/syft/tests/syft/users/user_service_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/tests/syft/users/user_service_test.py b/packages/syft/tests/syft/users/user_service_test.py index 7c0cc32562a..cd6d0aeefd4 100644 --- a/packages/syft/tests/syft/users/user_service_test.py +++ b/packages/syft/tests/syft/users/user_service_test.py @@ -219,7 +219,7 @@ def test_userservice_search( guest_user: User, ) -> None: def mock_find_all(credentials: SyftVerifyKey, **kwargs) -> Ok | Err: - for key, _ in kwargs.items(): + for key in kwargs.keys(): if hasattr(guest_user, key): return Ok([guest_user]) return Err("Invalid kwargs") From 7566fc391738463d7814a5ce8bd518a3a7941625 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:15:01 +0200 Subject: [PATCH 080/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index df736a28b70..07d2be0587d 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -142,7 +142,7 @@ def syft_iter_content( logging.error( f"Attempt {attempt}/{max_retries} failed: {e} at byte {current_byte}. Retrying..." ) - time.sleep(2 ** attempt) # exponential backoff + time.sleep(2**attempt) # exponential backoff else: logging.error(f"Max retries reached. 
Failed with error: {e}") raise From a046c54b948bd7ccf8904edae13c127d132a2257 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:15:37 +0200 Subject: [PATCH 081/313] Update seaweedfs.py --- packages/syft/src/syft/store/blob_storage/seaweedfs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/seaweedfs.py b/packages/syft/src/syft/store/blob_storage/seaweedfs.py index 930c99ce8a7..1f94e94cefb 100644 --- a/packages/syft/src/syft/store/blob_storage/seaweedfs.py +++ b/packages/syft/src/syft/store/blob_storage/seaweedfs.py @@ -122,7 +122,7 @@ def add_chunks_to_queue( ) -> None: """Creates a data generator for the part""" n = 0 - + while True: if n * chunk_size >= part_size: break @@ -132,7 +132,7 @@ def add_chunks_to_queue( self.no_lines += chunk.count(b"\n") n += 1 queue.put(chunk) - + # Use None to indicate the end of the part or file queue.put(None) From d5c013d27b537a99f3104f443d6265630feaa5e7 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:16:12 +0200 Subject: [PATCH 082/313] Update mongo_document_store.py --- packages/syft/src/syft/store/mongo_document_store.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py index 68c00957bd9..234dd2c723b 100644 --- a/packages/syft/src/syft/store/mongo_document_store.py +++ b/packages/syft/src/syft/store/mongo_document_store.py @@ -425,7 +425,8 @@ def _get_all_from_store( res = [ s for s in syft_objs - if has_permission or self.has_permission(ActionObjectREAD(uid=s.id, credentials=credentials)) + if has_permission + or self.has_permission(ActionObjectREAD(uid=s.id, credentials=credentials)) ] return Ok(res) From 49e82afa2c4a67db1b2e75e2640cc78d0ef2765b Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:18:34 +0200 Subject: [PATCH 083/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index 07d2be0587d..08c773f737d 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -139,12 +139,12 @@ def syft_iter_content( except requests.exceptions.RequestException as e: if attempt < max_retries: - logging.error( + print( f"Attempt {attempt}/{max_retries} failed: {e} at byte {current_byte}. Retrying..." ) time.sleep(2**attempt) # exponential backoff else: - logging.error(f"Max retries reached. Failed with error: {e}") + print(f"Max retries reached. 
Failed with error: {e}") raise From 3be33d3dc8188ba6b37b88b5e157ce53c675cf5a Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:21:19 +0200 Subject: [PATCH 084/313] Update zmq_queue.py --- packages/syft/src/syft/service/queue/zmq_queue.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 6b6f7cdda75..2a694642d26 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -300,7 +300,10 @@ def read_items(self) -> None: continue for arg in action.args: self.preprocess_action_arg(arg) - [self.preprocess_action_arg(arg) for _, arg in action.kwargs.items()] + [ + self.preprocess_action_arg(arg) + for arg in action.kwargs.values() + ] msg_bytes = serialize(item, to_bytes=True) worker_pool = item.worker_pool.resolve_with_context( From 21300f5ecf98c23cca557d660e67ca37594ffc0f Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:23:07 +0200 Subject: [PATCH 085/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index 08c773f737d..8c99e9d8cb3 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -41,6 +41,7 @@ """ # stdlib +import time from collections.abc import Callable from collections.abc import Generator from io import BytesIO From f3c9736fbfc1d45eb0a55742abc85f9201d8a294 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:29:01 +0200 Subject: [PATCH 086/313] Update seaweedfs.py --- .../src/syft/store/blob_storage/seaweedfs.py | 27 +++++++++---------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/seaweedfs.py b/packages/syft/src/syft/store/blob_storage/seaweedfs.py index 1f94e94cefb..1d88fedda37 100644 --- a/packages/syft/src/syft/store/blob_storage/seaweedfs.py +++ b/packages/syft/src/syft/store/blob_storage/seaweedfs.py @@ -115,26 +115,23 @@ def async_generator( def add_chunks_to_queue( self, - data: IO[bytes], - part_size: int, queue: Queue, chunk_size: int = DEFAULT_UPLOAD_CHUNK_SIZE, ) -> None: - """Creates a data generator for the part""" + """Creates a data geneator for the part""" n = 0 - while True: - if n * chunk_size >= part_size: - break - chunk = data.read(chunk_size) - if not chunk: - break - self.no_lines += chunk.count(b"\n") - n += 1 - queue.put(chunk) - - # Use None to indicate the end of the part or file - queue.put(None) + while n * chunk_size <= part_size: + try: + chunk = data.read(chunk_size) + self.no_lines += chunk.count(b"\n") + n += 1 + queue.put(chunk) + except BlockingIOError: + # if end of file, stop + queue.put(0) + # if end of part, stop + queue.put(0) gen = PartGenerator() From 72ab45078925214e54db714c85ffe8963b2c0a6e Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:34:59 +0200 Subject: [PATCH 087/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index 8c99e9d8cb3..a0c4cdd5f6b 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ 
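
The zmq_queue churn around this point (a list comprehension over preprocess_action_arg above, reverted to a plain for loop a few patches below) is a reminder that a comprehension evaluated only for its side effects builds and discards a throwaway list. A quick illustration:

    seen = []

    def preprocess(arg):  # stands in for preprocess_action_arg
        seen.append(arg)

    kwargs = {"x": 1, "y": 2}

    # Anti-pattern: the list of None results is built, then thrown away.
    [preprocess(v) for v in kwargs.values()]

    # Clearer and allocation-free, which is where the series lands.
    for v in kwargs.values():
        preprocess(v)

    print(seen)  # [1, 2, 1, 2]
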
b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -41,10 +41,10 @@ """ # stdlib -import time from collections.abc import Callable from collections.abc import Generator from io import BytesIO +import time from typing import Any # third party From 149369363ad3902d6a93cf6d700a9269c12cde99 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:40:53 +0200 Subject: [PATCH 088/313] Update zmq_queue.py --- packages/syft/src/syft/service/queue/zmq_queue.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 2a694642d26..266728732d7 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -300,10 +300,8 @@ def read_items(self) -> None: continue for arg in action.args: self.preprocess_action_arg(arg) - [ + for arg in action.kwargs.values(): self.preprocess_action_arg(arg) - for arg in action.kwargs.values() - ] msg_bytes = serialize(item, to_bytes=True) worker_pool = item.worker_pool.resolve_with_context( From acf050e9e044218b7946e2368b32bd62bb2e1923 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 17:44:11 +0200 Subject: [PATCH 089/313] Update dicttuple_test.py --- packages/syft/tests/syft/types/dicttuple_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/tests/syft/types/dicttuple_test.py b/packages/syft/tests/syft/types/dicttuple_test.py index 1a0405b406f..71bf1bb52c9 100644 --- a/packages/syft/tests/syft/types/dicttuple_test.py +++ b/packages/syft/tests/syft/types/dicttuple_test.py @@ -39,7 +39,7 @@ def test_dict_tuple_not_subclassing_mapping(): @pytest.mark.parametrize("dict_tuple", SIMPLE_TEST_CASES) def test_should_iter_over_value(dict_tuple: DictTuple) -> None: - values = [v for v in dict_tuple] + values = list(dict_tuple) assert values == [1, 2] From 0d35a03982a43392212b0f7cbf91c910fa8bc481 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:03:39 +0200 Subject: [PATCH 090/313] Update seaweedfs.py --- .../syft/src/syft/store/blob_storage/seaweedfs.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/seaweedfs.py b/packages/syft/src/syft/store/blob_storage/seaweedfs.py index 1d88fedda37..a63ed8a2d67 100644 --- a/packages/syft/src/syft/store/blob_storage/seaweedfs.py +++ b/packages/syft/src/syft/store/blob_storage/seaweedfs.py @@ -121,16 +121,17 @@ def add_chunks_to_queue( """Creates a data geneator for the part""" n = 0 - while n * chunk_size <= part_size: - try: + try: + while n * chunk_size <= part_size: chunk = data.read(chunk_size) + if not chunk: + break self.no_lines += chunk.count(b"\n") n += 1 queue.put(chunk) - except BlockingIOError: - # if end of file, stop - queue.put(0) - # if end of part, stop + except BlockingIOError: + pass + # if end of file or part, stop queue.put(0) gen = PartGenerator() From 5c733eb7d03e292c7ad593784db6a90e7732bd03 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:10:25 +0200 Subject: [PATCH 091/313] Update ruff.toml --- ruff.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ruff.toml b/ruff.toml index 6d8e8a2f93a..395dfbdef04 100644 --- a/ruff.toml +++ b/ruff.toml @@ -14,6 +14,12 @@ select = [ "F", # pyflake "B", # flake8-bugbear "C4", # flake8-comprehensions + # "NPY", # NumPy-specific rules + # "PD", # pandas-vet + "PERF", # Perflint + # "PL", # Pylint + # 
"PTH", # flake8-use-pathlib + # "SIM", # flake8-simplify "UP", # pyupgrade ] ignore = [ From 42c1abcc8cc239de8a9cd54e5666ae9040385b86 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:12:50 +0200 Subject: [PATCH 092/313] Update ruff.toml --- ruff.toml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/ruff.toml b/ruff.toml index 395dfbdef04..aa129013555 100644 --- a/ruff.toml +++ b/ruff.toml @@ -14,12 +14,7 @@ select = [ "F", # pyflake "B", # flake8-bugbear "C4", # flake8-comprehensions - # "NPY", # NumPy-specific rules - # "PD", # pandas-vet "PERF", # Perflint - # "PL", # Pylint - # "PTH", # flake8-use-pathlib - # "SIM", # flake8-simplify "UP", # pyupgrade ] ignore = [ From 62ba0767e1e32b19df369b2ba05b9e56d68ee1f6 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:15:39 +0200 Subject: [PATCH 093/313] Update code_history_service.py --- .../src/syft/service/code_history/code_history_service.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/code_history/code_history_service.py b/packages/syft/src/syft/service/code_history/code_history_service.py index f32338ed936..dc356b1f931 100644 --- a/packages/syft/src/syft/service/code_history/code_history_service.py +++ b/packages/syft/src/syft/service/code_history/code_history_service.py @@ -134,9 +134,10 @@ def get_code(uid: UID) -> UserCode | SyftError: code_versions_dict = {} for code_history in code_histories: - user_code_list = [] - for uid in code_history.user_code_history: - user_code_list.append(get_code(uid)) + user_code_list = [ + get_code(uid) + for uid in code_history.user_code_history + ] code_versions = CodeHistoryView( user_code_history=user_code_list, service_func_name=code_history.service_func_name, From 627ec86c9ad61d57c0ff4a39d89714020964f382 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:17:25 +0200 Subject: [PATCH 094/313] Update code_history_service.py --- .../syft/src/syft/service/code_history/code_history_service.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/code_history/code_history_service.py b/packages/syft/src/syft/service/code_history/code_history_service.py index dc356b1f931..adfd6dbee5d 100644 --- a/packages/syft/src/syft/service/code_history/code_history_service.py +++ b/packages/syft/src/syft/service/code_history/code_history_service.py @@ -135,8 +135,7 @@ def get_code(uid: UID) -> UserCode | SyftError: for code_history in code_histories: user_code_list = [ - get_code(uid) - for uid in code_history.user_code_history + get_code(uid) for uid in code_history.user_code_history ] code_versions = CodeHistoryView( user_code_history=user_code_list, From ad5e84c95044e70d077f6d1a59b3695a52130304 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:39:47 +0200 Subject: [PATCH 095/313] Update __init__.py --- .../syft/src/syft/store/blob_storage/__init__.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index a0c4cdd5f6b..6c2a3966219 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -117,16 +117,16 @@ def read(self, _deserialize: bool = True) -> SyftObject | SyftError: def syft_iter_content( - blob_url: str | GridURL, + blob_url: Union[str, GridURL], chunk_size: int, max_retries: int = MAX_RETRIES, timeout: 
int = DEFAULT_TIMEOUT, ) -> Generator: - """custom iter content with smart retries (start from last byte read)""" + """Custom iter content with smart retries (start from last byte read)""" current_byte = 0 for attempt in range(max_retries): + headers = {"Range": f"bytes={current_byte}-"} try: - headers = {"Range": f"bytes={current_byte}-"} with requests.get( str(blob_url), stream=True, headers=headers, timeout=(timeout, timeout) ) as response: @@ -136,12 +136,11 @@ def syft_iter_content( ): current_byte += len(chunk) yield chunk - return - + return # If successful, exit the function except requests.exceptions.RequestException as e: - if attempt < max_retries: + if attempt < max_retries - 1: print( - f"Attempt {attempt}/{max_retries} failed: {e} at byte {current_byte}. Retrying..." + f"Attempt {attempt + 1}/{max_retries} failed: {e} at byte {current_byte}. Retrying..." ) time.sleep(2**attempt) # exponential backoff else: From 9da3b3a77ac16fd5a5d76b19f111c9e24d59c366 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:42:13 +0200 Subject: [PATCH 096/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index 6c2a3966219..512a984a9f3 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -45,7 +45,7 @@ from collections.abc import Generator from io import BytesIO import time -from typing import Any +from typing import Any, Generator, Union # third party from pydantic import BaseModel From 1332795a8da104c45ed282e276294a29a4c99cd4 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:44:45 +0200 Subject: [PATCH 097/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index 512a984a9f3..2223f883900 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -45,7 +45,8 @@ from collections.abc import Generator from io import BytesIO import time -from typing import Any, Generator, Union +from typing import Any +from typing import Union # third party from pydantic import BaseModel From cd167ca7a203b3f830f799645e9ab1f67e3062fc Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Fri, 7 Jun 2024 18:45:33 +0200 Subject: [PATCH 098/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index 2223f883900..9cab0913dde 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -46,7 +46,6 @@ from io import BytesIO import time from typing import Any -from typing import Union # third party from pydantic import BaseModel @@ -118,7 +117,7 @@ def read(self, _deserialize: bool = True) -> SyftObject | SyftError: def syft_iter_content( - blob_url: Union[str, GridURL], + blob_url: str | GridURL, chunk_size: int, max_retries: int = MAX_RETRIES, timeout: int = DEFAULT_TIMEOUT, From ecbdd42c7e719011772fd7fff242af6a1fb3f7aa Mon Sep 17 00:00:00 2001 From: Olivier 
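
The syft_iter_content revisions above combine two techniques: resuming a streamed download from the last byte received via an HTTP Range header, and sleeping exponentially longer between retries. A compact sketch of the same strategy (URL handling and limits are illustrative; assumes the requests package):

    import time

    import requests

    def iter_content_resumable(url: str, chunk_size: int = 8192, max_retries: int = 3):
        current = 0
        for attempt in range(max_retries):
            try:
                headers = {"Range": f"bytes={current}-"}  # resume where we left off
                with requests.get(url, stream=True, headers=headers, timeout=30) as r:
                    for chunk in r.iter_content(chunk_size=chunk_size):
                        current += len(chunk)
                        yield chunk
                return
            except requests.exceptions.RequestException:
                if attempt == max_retries - 1:
                    raise
                time.sleep(2**attempt)  # exponential backoff before retrying
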
DEBAUCHE Date: Sat, 8 Jun 2024 01:53:01 +0200 Subject: [PATCH 099/313] Update recursive_primitives.py --- packages/syft/src/syft/serde/recursive_primitives.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/serde/recursive_primitives.py b/packages/syft/src/syft/serde/recursive_primitives.py index 5bc2720a353..042234cd843 100644 --- a/packages/syft/src/syft/serde/recursive_primitives.py +++ b/packages/syft/src/syft/serde/recursive_primitives.py @@ -72,10 +72,10 @@ def deserialize_iterable(iterable_type: type, blob: bytes) -> Collection: with iterable_schema.from_bytes( blob, traversal_limit_in_words=MAX_TRAVERSAL_LIMIT ) as msg: - values = ( + values = [ _deserialize(combine_bytes(element), from_bytes=True) for element in msg.values - ) + ] return iterable_type(values) From 3f9dc435a2b0a924c9df8692772eec77c0618ebc Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Mon, 10 Jun 2024 18:05:50 +0200 Subject: [PATCH 100/313] fix mongo issues --- .../syft/src/syft/service/action/action_object.py | 11 ++++++++++- .../syft/src/syft/service/action/action_service.py | 8 ++++++-- packages/syft/src/syft/types/twin_object.py | 12 +++++++----- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index dffa3d3d9de..9eed1816b84 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -318,6 +318,7 @@ class ActionObjectPointer: "_data_repr", "syft_eq", # syft "__table_coll_widths__", + "_clear_cache", ] dont_wrap_output_attrs = [ "__repr__", @@ -341,6 +342,7 @@ class ActionObjectPointer: "get_sync_dependencies", # syft "syft_eq", # syft "__table_coll_widths__", + "_clear_cache", ] dont_make_side_effects = [ "__repr_attrs__", @@ -362,6 +364,7 @@ class ActionObjectPointer: "get_sync_dependencies", "syft_eq", # syft "__table_coll_widths__", + "_clear_cache", ] action_data_empty_must_run = [ "__repr__", @@ -857,9 +860,15 @@ def _save_to_blob_storage(self) -> SyftError | None: if isinstance(result, SyftError): return result if not TraceResultRegistry.current_thread_is_tracing(): - self.syft_action_data_cache = self.as_empty_data() + self._clear_cache() return None + def _clear_cache(self, clear_reprs: bool = False) -> None: + self.syft_action_data_cache = self.as_empty_data() + if clear_reprs: + self.syft_action_data_repr_ = "" + self.syft_action_data_str_ = "" + @property def is_pointer(self) -> bool: return self.syft_node_uid is not None diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 9c3808e54f1..046c8f73f43 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -102,9 +102,13 @@ def _set( if isinstance(action_object, ActionObject): action_object.syft_created_at = DateTime.now() + action_object._clear_cache(clear_reprs=True) else: - action_object.private_obj.syft_created_at = DateTime.now() # type: ignore[unreachable] - action_object.mock_obj.syft_created_at = DateTime.now() + twin_object = action_object + twin_object.private_obj.syft_created_at = DateTime.now() # type: ignore[unreachable] + twin_object.mock_obj.syft_created_at = DateTime.now() + twin_object.private_obj._clear_cache() + twin_object.mock._clear_cache(clear_reprs=True) # If either context or argument is True, has_result_read_permission is 
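The generator-to-list change in deserialize_iterable above is subtle but important: a generator expression is lazy, so with the old code the capnp message buffer could already be closed by the time iterable_type(values) started consuming it. The same failure mode in miniature, using an in-memory buffer instead of a capnp message:

    import io

    def broken():
        with io.BytesIO(b"one\ntwo\n") as f:
            chunks = (line for line in f)  # lazy: nothing is read yet
        return list(chunks)  # ValueError: I/O operation on closed file

    def fixed():
        with io.BytesIO(b"one\ntwo\n") as f:
            chunks = [line for line in f]  # materialized while the buffer is open
        return chunks  # [b'one\n', b'two\n']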
True has_result_read_permission = ( diff --git a/packages/syft/src/syft/types/twin_object.py b/packages/syft/src/syft/types/twin_object.py index 458c69c0923..2ab0a4780f3 100644 --- a/packages/syft/src/syft/types/twin_object.py +++ b/packages/syft/src/syft/types/twin_object.py @@ -87,9 +87,11 @@ def _save_to_blob_storage(self) -> SyftError | None: self.syft_node_location, self.syft_client_verify_key, ) - # self.mock_obj._set_obj_location_( - # self.syft_node_location, - # self.syft_client_verify_key, - # ) + self.mock_obj._set_obj_location_( + self.syft_node_location, + self.syft_client_verify_key, + ) + mock_store_res = self.mock_obj._save_to_blob_storage() + if isinstance(mock_store_res, SyftError): + return mock_store_res return self.private_obj._save_to_blob_storage() - # self.mock_obj._save_to_blob_storage() From 23b88b165a6b308d71bbdbbaba7b678d43dbcbcc Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Mon, 10 Jun 2024 18:48:13 +0200 Subject: [PATCH 101/313] refactor actionobj name --- packages/syft/src/syft/service/action/action_service.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 046c8f73f43..fb7a412c00f 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -104,11 +104,10 @@ def _set( action_object.syft_created_at = DateTime.now() action_object._clear_cache(clear_reprs=True) else: - twin_object = action_object - twin_object.private_obj.syft_created_at = DateTime.now() # type: ignore[unreachable] - twin_object.mock_obj.syft_created_at = DateTime.now() - twin_object.private_obj._clear_cache() - twin_object.mock._clear_cache(clear_reprs=True) + action_object.private_obj.syft_created_at = DateTime.now() # type: ignore[unreachable] + action_object.mock_obj.syft_created_at = DateTime.now() + action_object.private_obj._clear_cache(clear_reprs=True) + action_object.mock._clear_cache(clear_reprs=True) # If either context or argument is True, has_result_read_permission is True has_result_read_permission = ( From e4610fe80edba46f9434cf5c786cdb8e31f655fe Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Tue, 11 Jun 2024 06:56:19 +0200 Subject: [PATCH 102/313] Update node.py --- packages/syft/src/syft/node/node.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 65a3ed80eda..31ead514b2b 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -833,7 +833,6 @@ def get_guest_client(self, verbose: bool = True) -> SyftClient: def __repr__(self) -> str: service_string = "" if not self.is_subprocess: - services = [] services = [service.__name__ for service in self.services] service_string = ", ".join(sorted(services)) service_string = f"\n\nServices:\n{service_string}" From e1196b6e3d3c6ba29190a8ada0dddd0c5d6566c5 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Tue, 11 Jun 2024 06:57:30 +0200 Subject: [PATCH 103/313] Update action_object.py --- packages/syft/src/syft/service/action/action_object.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 15ef93096c5..8bdf940a799 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ 
-1092,10 +1092,7 @@ def syft_make_action( arg_ids = [self._syft_prepare_obj_uid(obj) for obj in args] - kwarg_ids = {} - - for k, obj in kwargs.items(): - kwarg_ids[k] = self._syft_prepare_obj_uid(obj) + kwarg_ids = {k: self._syft_prepare_obj_uid(obj) for k, obj in kwargs.items()} action = Action( path=path, From 9f533b49d45df93f72da5a860aa8fc8591d2b380 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Tue, 11 Jun 2024 15:38:17 +0200 Subject: [PATCH 104/313] Update client.py --- packages/syft/src/syft/client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 72a048d2d2b..95e31023dd5 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -589,7 +589,7 @@ def get_nested_codes(code: UserCode) -> list[UserCode]: if code.nested_codes is None: return result - for linked_code_obj in code.nested_codes.keys(): + for linked_code_obj in code.nested_codes.values(): nested_code = linked_code_obj.resolve nested_code = deepcopy(nested_code) nested_code.node_uid = code.node_uid From 1d85690ab42bcf1ad8f05dd08fc41264b591a981 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Wed, 12 Jun 2024 06:38:48 +0200 Subject: [PATCH 105/313] Update client.py --- packages/syft/src/syft/client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 95e31023dd5..dbc8740b421 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -589,7 +589,7 @@ def get_nested_codes(code: UserCode) -> list[UserCode]: if code.nested_codes is None: return result - for linked_code_obj in code.nested_codes.values(): + for (linked_code_obj, _) in code.nested_codes.values(): nested_code = linked_code_obj.resolve nested_code = deepcopy(nested_code) nested_code.node_uid = code.node_uid From 1968fb223dc3ef6936fc16c6d7d1688c0d9432bb Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Wed, 12 Jun 2024 06:39:41 +0200 Subject: [PATCH 106/313] Update zmq_queue.py --- packages/syft/src/syft/service/queue/zmq_queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 266728732d7..ed8aaa662f2 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -476,7 +476,7 @@ def _run(self) -> None: if self._stop.is_set(): return - for service in self.services.keys(): + for service in self.services.values(): thread = threading.Thread(target=self.dispatch, args=(service, None)) thread.start() From 150771c39578d9b9564bbbdd434d5b9b490a0472 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Wed, 12 Jun 2024 06:41:06 +0200 Subject: [PATCH 107/313] Update worker_pool_service.py --- packages/syft/src/syft/service/worker/worker_pool_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index 998eefb8462..d42645a19bb 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -362,7 +362,7 @@ def get_all( return SyftError(message=f"{result.err()}") worker_pools: list[WorkerPool] = result.ok() - res = [(pool.name, pool) for pool in 
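PATCH 104 through PATCH 106 all fix the same bug class: iterating a dict (or calling .keys()) yields the keys, so code that needs the stored objects must iterate .values() or .items(). A small illustration with placeholder objects (the names are hypothetical, the shapes mirror nested_codes from the diff):

    linked_obj_a = object()  # stand-in for a LinkedObject
    linked_obj_b = object()
    nested_codes = {"func_a": (linked_obj_a, {}), "func_b": (linked_obj_b, {})}

    for name in nested_codes:
        assert isinstance(name, str)  # iterating a dict yields keys only

    for linked_obj, subtree in nested_codes.values():
        ...  # the tuple unpacking from PATCH 105/111 only works over .values()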
worker_pools] + res = ((pool.name, pool) for pool in worker_pools) return DictTuple(res) @service_method( From 8b055fdc5bb6fa434073fd4ce9706b69b1ed67b2 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Wed, 12 Jun 2024 06:46:43 +0200 Subject: [PATCH 108/313] Update worker_pool.py --- packages/syft/src/syft/service/worker/worker_pool.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/syft/src/syft/service/worker/worker_pool.py b/packages/syft/src/syft/service/worker/worker_pool.py index b3ad441d0f6..461c4f61b7d 100644 --- a/packages/syft/src/syft/service/worker/worker_pool.py +++ b/packages/syft/src/syft/service/worker/worker_pool.py @@ -191,8 +191,6 @@ def healthy_workers(self) -> list[SyftWorker] | SyftError: """ Query the healthy workers using an API call to the server """ - _healthy_workers = [] - _healthy_workers = [ worker for worker in self.workers From 3ee05ab2f9039b910ada3b65ab99f6bb84e5f767 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Wed, 12 Jun 2024 06:52:26 +0200 Subject: [PATCH 109/313] Update __init__.py --- packages/syft/src/syft/store/blob_storage/__init__.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index b75c038c0d4..15658ad4c8c 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -44,7 +44,6 @@ from collections.abc import Callable from collections.abc import Generator from io import BytesIO -import time from typing import Any # third party @@ -138,11 +137,10 @@ def syft_iter_content( yield chunk return # If successful, exit the function except requests.exceptions.RequestException as e: - if attempt < max_retries - 1: + if attempt < max_retries: print( - f"Attempt {attempt + 1}/{max_retries} failed: {e} at byte {current_byte}. Retrying..." + f"Attempt {attempt}/{max_retries} failed: {e} at byte {current_byte}. Retrying..." ) - time.sleep(2**attempt) # exponential backoff else: print(f"Max retries reached. 
Failed with error: {e}") raise From a30d33c84ce19222d34e80e8f7f1adbc7707a481 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Wed, 12 Jun 2024 07:24:12 +0200 Subject: [PATCH 110/313] Update zmq_queue.py --- packages/syft/src/syft/service/queue/zmq_queue.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index ed8aaa662f2..6cf4cf3794f 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -477,8 +477,7 @@ def _run(self) -> None: return for service in self.services.values(): - thread = threading.Thread(target=self.dispatch, args=(service, None)) - thread.start() + self.dispatch(service, None) items = None From 736366602eb0f82c13a95c428e6b238ab36ed6f0 Mon Sep 17 00:00:00 2001 From: Olivier DEBAUCHE Date: Wed, 12 Jun 2024 07:37:47 +0200 Subject: [PATCH 111/313] Update client.py --- packages/syft/src/syft/client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index dbc8740b421..f0d9c55b340 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -589,7 +589,7 @@ def get_nested_codes(code: UserCode) -> list[UserCode]: if code.nested_codes is None: return result - for (linked_code_obj, _) in code.nested_codes.values(): + for linked_code_obj, _ in code.nested_codes.values(): nested_code = linked_code_obj.resolve nested_code = deepcopy(nested_code) nested_code.node_uid = code.node_uid From 2ddd65dbfab23a9b131cf6a6109f85fc0851873f Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Wed, 12 Jun 2024 10:25:32 +0200 Subject: [PATCH 112/313] add lowside check --- packages/syft/src/syft/service/request/request.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index a96aa0845fd..dd15381e76f 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -628,7 +628,7 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: Args: reason (str): Reason for which the request has been denied. """ - if reason is None: + if not reason: return SyftError("Please provide a reason for denying the request.") api = self._get_api() @@ -760,7 +760,7 @@ def _create_action_object_for_deposited_result( # Ensure result is an ActionObject if isinstance(result, ActionObject): - existing_job = self._get_job_from_action_object(result) + existing_job = api.services.job.get_by_result_id(result.id.id) if existing_job is not None: return SyftError( message=f"This ActionObject is already the result of Job {existing_job.id}" @@ -815,7 +815,8 @@ def _create_output_history_for_deposited_result( def deposit_result( self, result: Any, - logs: str = "", + log_stdout: str = "", + log_stderr: str = "", ) -> Job | SyftError: """ Adds a result to this Request: @@ -841,6 +842,11 @@ def deposit_result( if isinstance(code, SyftError): return code + if not self.code.is_low_side: + return SyftError( + message="deposit_result is only available for low side requests. Please use request.approve() instead." 
+ ) + # Create ActionObject action_object = self._create_action_object_for_deposited_result(result) if isinstance(action_object, SyftError): @@ -851,7 +857,8 @@ def deposit_result( job = api.services.job.create_job_for_user_code_id( code.id, result=action_object, - log_stdout=logs, + log_stdout=log_stdout, + log_stderr=log_stderr, status=JobStatus.COMPLETED, add_code_owner_read_permissions=False, ) From de5f265064eed165988fb3e875a07ccf78e7192c Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Wed, 12 Jun 2024 10:25:53 +0200 Subject: [PATCH 113/313] add lowside check --- packages/syft/src/syft/service/request/request.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index dd15381e76f..863c384549e 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -628,9 +628,6 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: Args: reason (str): Reason for which the request has been denied. """ - if not reason: - return SyftError("Please provide a reason for denying the request.") - api = self._get_api() if isinstance(api, SyftError): return api From 7dda4a124fe4847e90d33ffb867c87ff7731bca0 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Wed, 12 Jun 2024 10:28:09 +0200 Subject: [PATCH 114/313] remove dead code --- .../syft/src/syft/service/request/request.py | 55 ------------------- 1 file changed, 55 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 863c384549e..e59d935a099 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -52,7 +52,6 @@ from ..context import AuthedServiceContext from ..context import ChangeContext from ..job.job_stash import Job -from ..job.job_stash import JobInfo from ..job.job_stash import JobStatus from ..notification.notifications import Notification from ..response import SyftError @@ -712,41 +711,6 @@ def save(self, context: AuthedServiceContext) -> Result[SyftSuccess, SyftError]: save_method = context.node.get_service_method(RequestService.save) return save_method(context=context, request=self) - def _get_latest_or_create_job(self) -> Job | SyftError: - """Get the latest job for this requests user_code, or creates one if no jobs exist""" - api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key) - if api is None: - return SyftError(message=f"api is None. You must login to {self.node_uid}") - job_service = api.services.job - - existing_jobs = job_service.get_by_user_code_id(self.code.id) - if isinstance(existing_jobs, SyftError): - return existing_jobs - - if len(existing_jobs) == 0: - print("Creating job for existing user code") - job = job_service.create_job_for_user_code_id(self.code.id) - else: - job = existing_jobs[-1] - res = job_service.add_read_permission_job_for_code_owner(job, self.code) - print(res) - res = job_service.add_read_permission_log_for_code_owner( - job.log_id, self.code - ) - print(res) - - return job - - def _get_job_from_action_object(self, action_object: ActionObject) -> Job | None: # type: ignore - api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key) - if api is None: - raise ValueError(f"Can't access the api. 
You must login to {self.node_uid}") - job_service = api.services.job - existing_jobs = job_service.get_by_user_code_id(self.code.id) - for job in existing_jobs: - if job.result and job.result.id == action_object.id: - return job - def _create_action_object_for_deposited_result( self, result: Any, @@ -877,25 +841,6 @@ def deposit_result( def accept_by_depositing_result(self, result: Any, force: bool = False) -> Any: pass - def sync_job( - self, job_info: JobInfo, **kwargs: Any - ) -> Result[SyftSuccess, SyftError]: - if job_info.includes_result: - return SyftError( - message="This JobInfo includes a Result. Please use Request.accept_by_depositing_result instead." - ) - - api = APIRegistry.api_for( - node_uid=self.node_uid, user_verify_key=self.syft_client_verify_key - ) - if api is None: - return SyftError(message=f"api is None. You must login to {self.node_uid}") - job_service = api.services.job - - job = self._get_latest_or_create_job() - job.apply_info(job_info) - return job_service.update(job) - def get_sync_dependencies( self, context: AuthedServiceContext ) -> list[UID] | SyftError: From 5ebe191d5e18750b239d8a3f76484be4f354985b Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Wed, 12 Jun 2024 14:16:40 +0200 Subject: [PATCH 115/313] fix happy path --- packages/syft/src/syft/client/api.py | 5 +- .../src/syft/protocol/protocol_version.json | 19 ++++++ .../syft/src/syft/service/code/user_code.py | 55 +++++++++++++++- .../syft/service/code/user_code_service.py | 29 ++++++--- .../src/syft/service/output/output_service.py | 17 ++--- .../syft/src/syft/service/request/request.py | 63 +++++++++++++------ 6 files changed, 148 insertions(+), 40 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index c4f87e5d53a..022f9737b3a 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -1062,8 +1062,11 @@ def make_call(self, api_call: SyftAPICall, cache_result: bool = True) -> Result: if isinstance(result, CachedSyftObject): if result.error_msg is not None: if cache_result: + msg = "Loading results from cache." + if result.error_msg: + msg = f"{result.error_msg}. {msg}" prompt_warning_message( - message=f"{result.error_msg}. Loading results from cache." 
+ message=msg, ) else: result = SyftError(message=result.error_msg) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 375aa1af66b..a383eed61dd 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -272,6 +272,25 @@ "hash": "89dbd4a810586b49498be1f5299b565a19871487e14a120433b0a4cf607b6dee", "action": "remove" } + }, + "UserCode": { + "4": { + "version": 4, + "hash": "0a7181cd5f76800b6566175ffa7276d0cf38c4ddc5110114430147dfc8bfdb2a", + "action": "remove" + }, + "5": { + "version": 5, + "hash": "128705a5fdf308055ef857b25c80966c928938a05ec03459dae9b36bd6122aa2", + "action": "add" + } + }, + "SyncedUserCodeStatusChange": { + "3": { + "version": 3, + "hash": "9b8ab2d513d84006bdd1329cd0bb636e7e62100a6227d8b772a5bf7c0c45b72f", + "action": "add" + } } } } diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index be563394ac9..8c636fc4da1 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -43,15 +43,19 @@ from ...store.document_store import PartitionKey from ...store.linked_obj import LinkedObject from ...types.datetime import DateTime +from ...types.syft_migration import migrate from ...types.syft_object import PartialSyftObject from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_4 +from ...types.syft_object import SYFT_OBJECT_VERSION_5 from ...types.syft_object import SyftObject from ...types.syncable_object import SyncableSyftObject from ...types.transforms import TransformContext from ...types.transforms import add_node_uid_for_key +from ...types.transforms import drop from ...types.transforms import generate_id +from ...types.transforms import make_set_default from ...types.transforms import transform from ...types.uid import UID from ...util import options @@ -256,11 +260,41 @@ def get_sync_dependencies(self, context: AuthedServiceContext) -> list[UID]: return [self.user_code_link.object_uid] +class UserCodeV4(SyncableSyftObject): + # version + __canonical_name__ = "UserCode" + __version__ = SYFT_OBJECT_VERSION_4 + + id: UID + node_uid: UID | None = None + user_verify_key: SyftVerifyKey + raw_code: str + input_policy_type: type[InputPolicy] | UserPolicy + input_policy_init_kwargs: dict[Any, Any] | None = None + input_policy_state: bytes = b"" + output_policy_type: type[OutputPolicy] | UserPolicy + output_policy_init_kwargs: dict[Any, Any] | None = None + output_policy_state: bytes = b"" + parsed_code: str + service_func_name: str + unique_func_name: str + user_unique_func_name: str + code_hash: str + signature: inspect.Signature + status_link: LinkedObject + input_kwargs: list[str] + enclave_metadata: EnclaveMetadata | None = None + submit_time: DateTime | None = None + uses_domain: bool = False # tracks if the code calls domain.something, variable is set during parsing + nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {} + worker_pool_name: str | None = None + + @serializable() class UserCode(SyncableSyftObject): # version __canonical_name__ = "UserCode" - __version__ = SYFT_OBJECT_VERSION_4 + __version__ = SYFT_OBJECT_VERSION_5 id: UID node_uid: UID | None = None @@ -726,7 +760,8 @@ def get_sync_dependencies( ] dependencies.extend(nested_code_ids) - 
dependencies.append(self.status_link.object_uid) + if self.status_link is not None: + dependencies.append(self.status_link.object_uid) return dependencies @@ -1710,3 +1745,19 @@ def load_approved_policy_code( load_policy_code(user_code.output_policy_type) except Exception as e: raise Exception(f"Failed to load code: {user_code}: {e}") + + +@migrate(UserCodeV4, UserCode) +def migrate_usercode_v4_to_v5() -> list[Callable]: + return [ + make_set_default("origin_node_side_type", NodeSideType.HIGH_SIDE), + make_set_default("l0_deny_reason", None), + ] + + +@migrate(UserCode, UserCodeV4) +def migrate_usercode_v5_to_v4() -> list[Callable]: + return [ + drop("origin_node_side_type"), + drop("l0_deny_reason"), + ] diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 2141806599f..50527102f2e 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -28,6 +28,7 @@ from ..policy.policy import OutputPolicy from ..request.request import Request from ..request.request import SubmitRequest +from ..request.request import SyncedUserCodeStatusChange from ..request.request import UserCodeStatusChange from ..request.request_service import RequestService from ..response import SyftError @@ -143,9 +144,11 @@ def _request_code_execution( root_context = AuthedServiceContext( credentials=context.node.verify_key, node=context.node ) - _ = context.node.get_service("usercodestatusservice").remove( - root_context, user_code.status_link.object_uid - ) + + if user_code.status_link is not None: + _ = context.node.get_service("usercodestatusservice").remove( + root_context, user_code.status_link.object_uid + ) return result result = self._request_code_execution_inner(context, user_code, reason) return result @@ -220,12 +223,20 @@ def _request_code_execution_inner( code_link = LinkedObject.from_obj(user_code, node_uid=context.node.id) - CODE_EXECUTE = UserCodeStatusChange( - value=UserCodeStatus.APPROVED, - linked_obj=user_code.status_link, - linked_user_code=code_link, - ) - changes = [CODE_EXECUTE] + # Requests made on low side are synced, and have their status computed instead of set manually. 
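The UserCode v4 to v5 migration above follows the project's migration-pair convention: the forward step fills the new fields with defaults, the backward step drops them again. The same pattern applied to a hypothetical object gaining one field (MyObject/MyObjectV1 are illustrative; migrate, make_set_default and drop are the helpers imported in the diff):

    from collections.abc import Callable

    from syft.types.syft_migration import migrate
    from syft.types.transforms import drop, make_set_default

    @migrate(MyObjectV1, MyObject)
    def migrate_myobject_v1_to_v2() -> list[Callable]:
        return [make_set_default("new_field", None)]  # forward: add with a default

    @migrate(MyObject, MyObjectV1)
    def migrate_myobject_v2_to_v1() -> list[Callable]:
        return [drop("new_field")]  # backward: remove the field again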
+ if user_code.is_low_side: + status_change = SyncedUserCodeStatusChange( + value=UserCodeStatus.APPROVED, + linked_obj=user_code.status_link, + linked_user_code=code_link, + ) + else: + status_change = UserCodeStatusChange( + value=UserCodeStatus.APPROVED, + linked_obj=user_code.status_link, + linked_user_code=code_link, + ) + changes = [status_change] request = SubmitRequest(changes=changes) method = context.node.get_service_method(RequestService.submit) diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py index cc60e06dcd1..4bafc24090c 100644 --- a/packages/syft/src/syft/service/output/output_service.py +++ b/packages/syft/src/syft/service/output/output_service.py @@ -1,6 +1,5 @@ # stdlib from typing import ClassVar -from typing import cast # third party from pydantic import model_validator @@ -25,7 +24,6 @@ from ...util.telemetry import instrument from ..action.action_object import ActionObject from ..action.action_permissions import ActionObjectREAD -from ..action.action_service import ActionService from ..context import AuthedServiceContext from ..response import SyftError from ..service import AbstractService @@ -311,16 +309,19 @@ def has_output_read_permissions( user_code_id: UID, code_owner_verify_key: SyftVerifyKey, ) -> bool: - action_service = cast(ActionService, context.node.get_service("actionservice")) + action_service = context.node.get_service("actionservice") all_outputs = self.get_by_user_code_id(context, user_code_id) if isinstance(all_outputs, SyftError): + print(all_outputs.message) return False + print("OUTPUTS", all_outputs) for output in all_outputs: - # Check if this output has permissions - if not self.stash.has_permission( - ActionObjectREAD(uid=output.id, credentials=code_owner_verify_key) - ): - continue + # TODO tech debt: unclear why code owner can see outputhistory without permissions. 
+ # It is not a security issue (output history has no data) it is confusing for user + # if not self.stash.has_permission( + # ActionObjectREAD(uid=output.id, credentials=code_owner_verify_key) + # ): + # continue # Check if all output ActionObjects have permissions result_ids = output.output_id_list diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 568847bc850..910eaeb9890 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -65,6 +65,15 @@ class RequestStatus(Enum): REJECTED = 1 APPROVED = 2 + @classmethod + def from_usercode_status(cls, status: UserCodeStatusCollection) -> "RequestStatus": + if status.approved: + return RequestStatus.APPROVED + elif status.denied: + return RequestStatus.REJECTED + else: + return RequestStatus.PENDING + @serializable() class Change(SyftObject): @@ -567,12 +576,7 @@ def icon(self) -> str: def status(self) -> RequestStatus: if self.code.is_low_side: code_status = self.code.status - if code_status == UserCodeStatus.PENDING: - return RequestStatus.PENDING - elif code_status == UserCodeStatus.DENIED: - return RequestStatus.REJECTED - elif code_status == UserCodeStatus.APPROVED: - return RequestStatus.APPROVED + return RequestStatus.from_usercode_status(code_status) if len(self.history) == 0: return RequestStatus.PENDING @@ -1208,20 +1212,6 @@ def valid(self) -> SyftSuccess | SyftError: ) return SyftSuccess(message=f"{type(self)} valid") - # def get_nested_requests(self, context, code_tree: Dict[str: Tuple[LinkedObject, Dict]]): - # approved_nested_codes = {} - # for key, (linked_obj, new_code_tree) in code_tree.items(): - # code_obj = linked_obj.resolve_with_context(context).ok() - # approved_nested_codes[key] = code_obj.id - - # res = self.get_nested_requests(context, new_code_tree) - # if isinstance(res, SyftError): - # return res - # code_obj.nested_codes = res - # linked_obj.update_with_context(context, code_obj) - - # return approved_nested_codes - def mutate( self, status: UserCodeStatusCollection, @@ -1310,3 +1300,36 @@ def link(self) -> SyftObject | None: if self.linked_obj: return self.linked_obj.resolve return None + + +@serializable() +class SyncedUserCodeStatusChange(UserCodeStatusChange): + __canonical_name__ = "SyncedUserCodeStatusChange" + __version__ = SYFT_OBJECT_VERSION_3 + linked_obj: LinkedObject | None = None + + @property + def approved(self) -> bool: + return self.code.status.approved + + def mutate( + self, + status: UserCodeStatusCollection, + context: ChangeContext, + undo: bool, + ) -> UserCodeStatusCollection | SyftError: + return SyftError( + message="Synced UserCodes status is computed, and cannot be updated manually." + ) + + def _run( + self, context: ChangeContext, apply: bool + ) -> Result[SyftSuccess, SyftError]: + return Ok( + SyftError( + message="Synced UserCodes status is computed, and cannot be updated manually." 
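For synced (L0) requests the status is now derived rather than stored: the request delegates to its UserCode status collection, which is itself computed from the output history. A sketch of the resulting control flow, where status_from_history is a hypothetical stand-in for the existing history-based branch:

    def request_status(request):
        if request.code.is_low_side:
            # L0: computed from the synced UserCode status
            return RequestStatus.from_usercode_status(request.code.status)
        # L2: derived from the request's own change history, as before
        return status_from_history(request.history)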
+ ) + ) + + def link(self) -> SyftObject | None: + return None From 1149a07dea5f671304d0c233807adde517d5be40 Mon Sep 17 00:00:00 2001 From: Kien Dang Date: Wed, 12 Jun 2024 20:17:54 +0800 Subject: [PATCH 116/313] Disable linter rule for the specific syntax test --- packages/syft/tests/syft/types/dicttuple_test.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/syft/tests/syft/types/dicttuple_test.py b/packages/syft/tests/syft/types/dicttuple_test.py index 71bf1bb52c9..43beb3116c2 100644 --- a/packages/syft/tests/syft/types/dicttuple_test.py +++ b/packages/syft/tests/syft/types/dicttuple_test.py @@ -39,7 +39,9 @@ def test_dict_tuple_not_subclassing_mapping(): @pytest.mark.parametrize("dict_tuple", SIMPLE_TEST_CASES) def test_should_iter_over_value(dict_tuple: DictTuple) -> None: - values = list(dict_tuple) + values = [] + for v in dict_tuple: + values.append(v) # noqa: PERF402 assert values == [1, 2] From 5f918351526cda87770b44c2130294257022393f Mon Sep 17 00:00:00 2001 From: Kien Dang Date: Wed, 12 Jun 2024 20:21:19 +0800 Subject: [PATCH 117/313] Disable ruff PERF rules for now Since most PERF rules' autofixes are unsafe, adding them to run automatically as part of pre-commit would be a bit disruptive. Would be nice to run this from time to time, or enable them once the autofixes are stable. --- ruff.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ruff.toml b/ruff.toml index aa129013555..3dccdf65b91 100644 --- a/ruff.toml +++ b/ruff.toml @@ -14,7 +14,7 @@ select = [ "F", # pyflake "B", # flake8-bugbear "C4", # flake8-comprehensions - "PERF", # Perflint + # "PERF", # perflint "UP", # pyupgrade ] ignore = [ From 05b6dddc2a3f2e3eb4c8609dec4ad867102d752f Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Wed, 12 Jun 2024 15:50:40 +0200 Subject: [PATCH 118/313] - --- .../src/syft/service/action/action_service.py | 7 +-- .../syft/src/syft/service/job/job_stash.py | 9 ++-- packages/syft/src/syft/service/queue/queue.py | 7 +++ .../syft/src/syft/service/queue/zmq_queue.py | 50 ++++++++++++++++--- 4 files changed, 59 insertions(+), 14 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index fb7a412c00f..98c77a190a6 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -336,10 +336,14 @@ def _user_code_execute( if is_approved.is_err(): return is_approved else: + result = retrieve_from_db(code_item.id, kwargs, context) if isinstance(result, SyftError): return Err(result.message) filtered_kwargs = result.ok() + import sys + print("BBB", file=sys.stderr) + # print(filtered_kwargs["data"].syft_blob_storage_entry_id, file=sys.stderr) # update input policy to track any input state has_twin_inputs = False @@ -354,7 +358,6 @@ def _user_code_execute( try: if not has_twin_inputs: - # no twins filtered_kwargs = filter_twin_kwargs( real_kwargs, twin_mode=TwinMode.NONE ) @@ -417,8 +420,6 @@ def _user_code_execute( mock_obj=result_action_object_mock, ) except Exception as e: - # import traceback - # return Err(f"_user_code_execute failed. {e} {traceback.format_exc()}") return Err(f"_user_code_execute failed. 
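With the PERF rules commented out of `select` they no longer run as part of pre-commit, but they can still be applied ad hoc from the command line, which is the occasional manual run the commit message suggests, e.g.:

    ruff check --select PERF packages/syft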
{e}") return Ok(result_action_object) diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 2943913cf73..9f3e6aa007e 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -653,9 +653,6 @@ def wait( if self.resolved: return self.resolve - if not job_only and self.result is not None: - self.result.wait(timeout) - if api is None: raise ValueError( f"Can't access Syft API. You must login to {self.syft_node_location}" @@ -664,6 +661,8 @@ def wait( counter = 0 while True: self.fetch() + if isinstance(self.result, (SyftError, Err)) or self.status in [JobStatus.ERRORED, JobStatus.INTERRUPTED]: + return self.result if print_warning and self.result is not None: result_obj = api.services.action.get( self.result.id, resolve_nested=False @@ -683,6 +682,10 @@ def wait( counter += 1 if counter > timeout: return SyftError(message="Reached Timeout!") + + if not job_only and self.result is not None: + self.result.wait(timeout) + return self.resolve # type: ignore[unreachable] @property diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index 968e4b7c975..1c6cb8467ec 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -203,6 +203,13 @@ def handle_message_multiprocessing( context=context, user_verify_key=credentials, ) + # import syft as sy + # res = [(x, x.syft_blob_storage_entry_id) if isinstance(x, sy.ActionObject) \ + # else (x, x.private.syft_blob_storage_entry_id) + # for x in context.node.action_store.data.values()] + # import sys + # print("KWARGS", kwargs, kwargs["data"]) + # print(res, file=sys.stderr) result: Any = call_method(context, *queue_item.args, **queue_item.kwargs) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 3ad4b732f89..9e4dab1e540 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -250,23 +250,57 @@ def unwrap_nested_actionobjects(self, data: Any) -> Any: return SyftError(message=f"{res}") else: nested_res = res.syft_action_data - if isinstance(nested_res, ActionObject): - nested_res.syft_node_location = res.syft_node_location - nested_res.syft_client_verify_key = res.syft_client_verify_key + # if isinstance(nested_res, ActionObject): + # nested_res.syft_node_location = res.syft_node_location + # nested_res.syft_client_verify_key = res.syft_client_verify_key return nested_res return data + def contains_nested_actionobjects(self, data: Any): + """ + returns if this is a list/set/dict that contains ActionObjects + """ + def unwrap_collection(col: set | dict | list): + return_values = [] + if isinstance(col, dict): + values = list(col.values()) + list(col.keys()) + else: + values = list(col) + for v in values: + if isinstance(v, list, dict, set): + return_values += unwrap_collection(v) + else: + return_values.append(v) + return return_values + + if isinstance(data, list, dict, set): + values = unwrap_collection(data) + has_action_object = any([isinstance(x, ActionObject) for x in values]) + return has_action_object + return False + def preprocess_action_arg(self, arg: Any) -> None: + """"If the argument is a collection (of collections) of ActionObjects, + We want to flatten the collection and upload a new ActionObject that contains + its values. E.g. 
[[ActionObject1, ActionObject2],[ActionObject3, ActionObject4]] + -> [[value1, value2],[value3, value4]] + """ res = self.action_service.get(context=self.auth_context, uid=arg) if res.is_err(): return arg action_object = res.ok() data = action_object.syft_action_data - new_data = self.unwrap_nested_actionobjects(data) - new_action_object = ActionObject.from_obj(new_data, id=action_object.id) - res = self.action_service.set( - context=self.auth_context, action_object=new_action_object - ) + if self.contains_nested_actionobjects(data): + new_data = self.unwrap_nested_actionobjects(data) + + new_action_object = ActionObject.from_obj( + new_data, + id=action_object.id, + syft_blob_storage_entry_id=action_object.syft_blob_storage_entry_id, + ) + res = self.action_service.set( + context=self.auth_context, action_object=new_action_object + ) def read_items(self) -> None: while True: From 7b636c5288c72c7195917bc52435fcb01dd805a2 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Thu, 13 Jun 2024 15:12:38 +1000 Subject: [PATCH 119/313] Added scenario stubs --- notebooks/scenarios/bigquery/README.md | 0 notebooks/scenarios/enclaves/README.md | 0 notebooks/scenarios/reverse_tunnel/README.md | 0 notebooks/scenarios/sync/README.md | 0 4 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 notebooks/scenarios/bigquery/README.md create mode 100644 notebooks/scenarios/enclaves/README.md create mode 100644 notebooks/scenarios/reverse_tunnel/README.md create mode 100644 notebooks/scenarios/sync/README.md diff --git a/notebooks/scenarios/bigquery/README.md b/notebooks/scenarios/bigquery/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/notebooks/scenarios/enclaves/README.md b/notebooks/scenarios/enclaves/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/notebooks/scenarios/reverse_tunnel/README.md b/notebooks/scenarios/reverse_tunnel/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/notebooks/scenarios/sync/README.md b/notebooks/scenarios/sync/README.md new file mode 100644 index 00000000000..e69de29bb2d From 7c64367f415f710692b0ca1123797af2a7ef199c Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Thu, 13 Jun 2024 15:13:32 +1000 Subject: [PATCH 120/313] Removed s --- notebooks/scenarios/{enclaves => enclave}/README.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename notebooks/scenarios/{enclaves => enclave}/README.md (100%) diff --git a/notebooks/scenarios/enclaves/README.md b/notebooks/scenarios/enclave/README.md similarity index 100% rename from notebooks/scenarios/enclaves/README.md rename to notebooks/scenarios/enclave/README.md From 53ee373dbb2799ce80d9bc381cc231b7934e792c Mon Sep 17 00:00:00 2001 From: teo Date: Thu, 13 Jun 2024 11:00:30 +0300 Subject: [PATCH 121/313] fix isinstance --- packages/syft/src/syft/service/queue/zmq_queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 9e4dab1e540..9d8954be705 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -273,7 +273,7 @@ def unwrap_collection(col: set | dict | list): return_values.append(v) return return_values - if isinstance(data, list, dict, set): + if isinstance(data, (list, dict, set)): values = unwrap_collection(data) has_action_object = any([isinstance(x, ActionObject) for x in values]) return has_action_object From 
96edf050e6ecb589f329ec7ed7465418ef1dfedc Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Thu, 13 Jun 2024 12:54:19 +0200 Subject: [PATCH 122/313] update queue handling of nested actionobjects --- .../syft/src/syft/service/queue/zmq_queue.py | 140 +++++++++--------- 1 file changed, 74 insertions(+), 66 deletions(-) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 9e4dab1e540..4391377693e 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -3,6 +3,7 @@ from collections import defaultdict import itertools import socketserver +import sys import threading import time from time import sleep @@ -250,7 +251,8 @@ def unwrap_nested_actionobjects(self, data: Any) -> Any: return SyftError(message=f"{res}") else: nested_res = res.syft_action_data - # if isinstance(nested_res, ActionObject): + if isinstance(nested_res, ActionObject): + raise ValueError("More than double nesting of ActionObjects is currently not supported") # nested_res.syft_node_location = res.syft_node_location # nested_res.syft_client_verify_key = res.syft_client_verify_key return nested_res @@ -267,19 +269,21 @@ def unwrap_collection(col: set | dict | list): else: values = list(col) for v in values: - if isinstance(v, list, dict, set): + if isinstance(v, (list, dict, set)): return_values += unwrap_collection(v) else: return_values.append(v) return return_values - if isinstance(data, list, dict, set): + if isinstance(data, (list, dict, set)): values = unwrap_collection(data) has_action_object = any([isinstance(x, ActionObject) for x in values]) return has_action_object + elif isinstance(data, ActionObject): + return True return False - def preprocess_action_arg(self, arg: Any) -> None: + def preprocess_action_arg(self, arg: UID) -> None: """"If the argument is a collection (of collections) of ActionObjects, We want to flatten the collection and upload a new ActionObject that contains its values. E.g. 
[[ActionObject1, ActionObject2],[ActionObject3, ActionObject4]] @@ -306,70 +310,74 @@ def read_items(self) -> None: while True: if self._stop.is_set(): break - sleep(1) - - # Items to be queued - items_to_queue = self.queue_stash.get_by_status( - self.queue_stash.partition.root_verify_key, - status=Status.CREATED, - ).ok() - - items_to_queue = [] if items_to_queue is None else items_to_queue - - # Queue Items that are in the processing state - items_processing = self.queue_stash.get_by_status( - self.queue_stash.partition.root_verify_key, - status=Status.PROCESSING, - ).ok() - - items_processing = [] if items_processing is None else items_processing - - for item in itertools.chain(items_to_queue, items_processing): - if item.status == Status.CREATED: - if isinstance(item, ActionQueueItem): - action = item.kwargs["action"] - if self.contains_unresolved_action_objects( - action.args - ) or self.contains_unresolved_action_objects(action.kwargs): - continue - for arg in action.args: - self.preprocess_action_arg(arg) - for _, arg in action.kwargs.items(): - self.preprocess_action_arg(arg) - - msg_bytes = serialize(item, to_bytes=True) - worker_pool = item.worker_pool.resolve_with_context( - self.auth_context - ) - worker_pool = worker_pool.ok() - service_name = worker_pool.name - service: Service | None = self.services.get(service_name) + try: + sleep(1) + + # Items to be queued + items_to_queue = self.queue_stash.get_by_status( + self.queue_stash.partition.root_verify_key, + status=Status.CREATED, + ).ok() + + items_to_queue = [] if items_to_queue is None else items_to_queue + + # Queue Items that are in the processing state + items_processing = self.queue_stash.get_by_status( + self.queue_stash.partition.root_verify_key, + status=Status.PROCESSING, + ).ok() + + items_processing = [] if items_processing is None else items_processing + + for item in itertools.chain(items_to_queue, items_processing): + # TODO: if resolving fails, set queueitem to errored, and jobitem as well + if item.status == Status.CREATED: + if isinstance(item, ActionQueueItem): + action = item.kwargs["action"] + if self.contains_unresolved_action_objects( + action.args + ) or self.contains_unresolved_action_objects(action.kwargs): + continue + for arg in action.args: + self.preprocess_action_arg(arg) + for _, arg in action.kwargs.items(): + self.preprocess_action_arg(arg) + + msg_bytes = serialize(item, to_bytes=True) + worker_pool = item.worker_pool.resolve_with_context( + self.auth_context + ) + worker_pool = worker_pool.ok() + service_name = worker_pool.name + service: Service | None = self.services.get(service_name) - # Skip adding message if corresponding service/pool - # is not registered. - if service is None: - continue + # Skip adding message if corresponding service/pool + # is not registered. + if service is None: + continue - # append request message to the corresponding service - # This list is processed in dispatch method. - - # TODO: Logic to evaluate the CAN RUN Condition - service.requests.append(msg_bytes) - item.status = Status.PROCESSING - res = self.queue_stash.update(item.syft_client_verify_key, item) - if res.is_err(): - logger.error( - "Failed to update queue item={} error={}", - item, - res.err(), - ) - elif item.status == Status.PROCESSING: - # Evaluate Retry condition here - # If job running and timeout or job status is KILL - # or heartbeat fails - # or container id doesn't exists, kill process or container - # else decrease retry count and mark status as CREATED. 
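unwrap_collection above is a standard recursive flatten over lists, dicts and sets (tuples are not unwrapped, and the dict branch emits values before keys, mirroring the original). As a standalone function:

    def flatten(col):
        values = []
        if isinstance(col, dict):
            items = list(col.values()) + list(col.keys())
        else:
            items = list(col)
        for v in items:
            if isinstance(v, (list, dict, set)):
                values += flatten(v)
            else:
                values.append(v)
        return values

    assert flatten([[1, 2], {"a": 3}]) == [1, 2, 3, "a"]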
- pass + # append request message to the corresponding service + # This list is processed in dispatch method. + + # TODO: Logic to evaluate the CAN RUN Condition + service.requests.append(msg_bytes) + item.status = Status.PROCESSING + res = self.queue_stash.update(item.syft_client_verify_key, item) + if res.is_err(): + logger.error( + "Failed to update queue item={} error={}", + item, + res.err(), + ) + elif item.status == Status.PROCESSING: + # Evaluate Retry condition here + # If job running and timeout or job status is KILL + # or heartbeat fails + # or container id doesn't exists, kill process or container + # else decrease retry count and mark status as CREATED. + pass + except Exception as e: + print(e, file=sys.stderr) def run(self) -> None: self.thread = threading.Thread(target=self._run) From 31a676b379c2978820d19b92f1624e4d27db6af5 Mon Sep 17 00:00:00 2001 From: teo Date: Thu, 13 Jun 2024 15:44:28 +0300 Subject: [PATCH 123/313] added queue failing --- packages/syft/src/syft/service/queue/zmq_queue.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 4391377693e..a71ea477a7b 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -378,6 +378,15 @@ def read_items(self) -> None: pass except Exception as e: print(e, file=sys.stderr) + item.status = Status.ERRORED + res = self.queue_stash.update(item.syft_client_verify_key, item) + if res.is_err(): + logger.error( + "Failed to update queue item={} error={}", + item, + res.err(), + ) + def run(self) -> None: self.thread = threading.Thread(target=self._run) From c2857bfc920f1159c2c42aa3469a56b537feda3a Mon Sep 17 00:00:00 2001 From: teo Date: Thu, 13 Jun 2024 15:44:52 +0300 Subject: [PATCH 124/313] fix action permission ALL_READ --- packages/syft/src/syft/store/kv_document_store.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/store/kv_document_store.py b/packages/syft/src/syft/store/kv_document_store.py index b594be92775..02faf120eaa 100644 --- a/packages/syft/src/syft/store/kv_document_store.py +++ b/packages/syft/src/syft/store/kv_document_store.py @@ -289,7 +289,7 @@ def has_permission(self, permission: ActionObjectPermission) -> bool: pass elif ( permission.permission == ActionPermission.READ - and ActionObjectPermission( + or ActionObjectPermission( permission.uid, ActionPermission.ALL_READ ).permission_string in self.permissions[permission.uid] From db933f10e4e57d0136fea486fbd666a4d0b38f53 Mon Sep 17 00:00:00 2001 From: teo Date: Thu, 13 Jun 2024 15:45:10 +0300 Subject: [PATCH 125/313] truncate action object repr and str --- .../src/syft/service/action/action_object.py | 22 ++++++++++--------- .../syft/service/action/action_permissions.py | 6 +++++ .../src/syft/service/action/action_service.py | 5 ++--- .../src/syft/service/action/action_store.py | 2 +- 4 files changed, 21 insertions(+), 14 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 9eed1816b84..01bd2f2c7d0 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -664,6 +664,11 @@ def debox_args_and_kwargs(args: Any, kwargs: Any) -> tuple[Any, Any]: "__table_coll_widths__", ] +def truncate_str(string, length = 100): + if len(string) > length: + repr_data = repr_data[:length] + '... 
data too long, truncated to 100 characters' + return string + @serializable(without=["syft_pre_hooks__", "syft_post_hooks__"]) class ActionObject(SyncableSyftObject): @@ -834,14 +839,14 @@ def _save_to_blob_storage_(self, data: Any) -> SyftError | None: self.syft_action_data_type = type(data) if inspect.isclass(data): - self.syft_action_data_repr_ = repr_cls(data) + self.syft_action_data_repr_ = truncate_str(repr_cls(data)) else: - self.syft_action_data_repr_ = ( + self.syft_action_data_repr_ = truncate_str( data._repr_markdown_() if hasattr(data, "_repr_markdown_") else data.__repr__() ) - self.syft_action_data_str_ = str(data) + self.syft_action_data_str_ = truncate_str(str(data)) self.syft_has_bool_attr = hasattr(data, "__bool__") else: debug("skipping writing action object to store, passed data was empty.") @@ -863,11 +868,8 @@ def _save_to_blob_storage(self) -> SyftError | None: self._clear_cache() return None - def _clear_cache(self, clear_reprs: bool = False) -> None: + def _clear_cache(self) -> None: self.syft_action_data_cache = self.as_empty_data() - if clear_reprs: - self.syft_action_data_repr_ = "" - self.syft_action_data_str_ = "" @property def is_pointer(self) -> bool: @@ -888,14 +890,14 @@ def __check_action_data(cls, values: dict) -> dict: values["syft_action_data_type"] = type(v) if not isinstance(v, ActionDataEmpty): if inspect.isclass(v): - values["syft_action_data_repr_"] = repr_cls(v) + values["syft_action_data_repr_"] = truncate_str(repr_cls(v)) else: - values["syft_action_data_repr_"] = ( + values["syft_action_data_repr_"] = truncate_str( v._repr_markdown_() if v is not None and hasattr(v, "_repr_markdown_") else v.__repr__() ) - values["syft_action_data_str_"] = str(v) + values["syft_action_data_str_"] = truncate_str(str(v)) values["syft_has_bool_attr"] = hasattr(v, "__bool__") return values diff --git a/packages/syft/src/syft/service/action/action_permissions.py b/packages/syft/src/syft/service/action/action_permissions.py index 2fda8bee2ef..30f1181ccc8 100644 --- a/packages/syft/src/syft/service/action/action_permissions.py +++ b/packages/syft/src/syft/service/action/action_permissions.py @@ -79,6 +79,12 @@ def __init__(self, uid: UID, credentials: SyftVerifyKey): self.credentials = credentials self.permission = ActionPermission.READ +class ActionObjectALLREAD(ActionObjectPermission): + def __init__(self, uid: UID, credentials: SyftVerifyKey): + self.uid = uid + self.credentials = credentials + self.permission = ActionPermission.ALL_READ + class ActionObjectWRITE(ActionObjectPermission): def __init__(self, uid: UID, credentials: SyftVerifyKey): diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 98c77a190a6..d7dd37fa0ea 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -1,5 +1,6 @@ # stdlib import importlib +import sys from typing import Any # third party @@ -115,6 +116,7 @@ def _set( or has_result_read_permission ) + print(f"{has_result_read_permission=}", file=sys.stderr) result = self.store.set( uid=action_object.id, credentials=context.credentials, @@ -341,9 +343,6 @@ def _user_code_execute( if isinstance(result, SyftError): return Err(result.message) filtered_kwargs = result.ok() - import sys - print("BBB", file=sys.stderr) - # print(filtered_kwargs["data"].syft_blob_storage_entry_id, file=sys.stderr) # update input policy to track any input state has_twin_inputs = False diff --git 
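As committed, truncate_str assigns to an undefined repr_data and always returns its input unchanged. The behavior the call sites expect is presumably:

    def truncate_str(string: str, length: int = 100) -> str:
        if len(string) > length:
            string = string[:length] + f"... data too long, truncated to {length} characters"
        return string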
a/packages/syft/src/syft/service/action/action_store.py b/packages/syft/src/syft/service/action/action_store.py index 001aa7a4e0f..faea424a03b 100644 --- a/packages/syft/src/syft/service/action/action_store.py +++ b/packages/syft/src/syft/service/action/action_store.py @@ -22,7 +22,7 @@ from ...types.uid import UID from ..response import SyftSuccess from .action_object import is_action_data_empty -from .action_permissions import ActionObjectEXECUTE +from .action_permissions import ActionObjectALLREAD, ActionObjectEXECUTE from .action_permissions import ActionObjectOWNER from .action_permissions import ActionObjectPermission from .action_permissions import ActionObjectREAD From c50cf24d9ec6314603f1367074a8b6b0ba85a367 Mon Sep 17 00:00:00 2001 From: teo Date: Thu, 13 Jun 2024 15:47:08 +0300 Subject: [PATCH 126/313] cleanup --- .../syft/src/syft/service/action/action_permissions.py | 7 ------- packages/syft/src/syft/service/action/action_service.py | 9 +++------ packages/syft/src/syft/service/action/action_store.py | 2 +- 3 files changed, 4 insertions(+), 14 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_permissions.py b/packages/syft/src/syft/service/action/action_permissions.py index 30f1181ccc8..df4b4344045 100644 --- a/packages/syft/src/syft/service/action/action_permissions.py +++ b/packages/syft/src/syft/service/action/action_permissions.py @@ -79,13 +79,6 @@ def __init__(self, uid: UID, credentials: SyftVerifyKey): self.credentials = credentials self.permission = ActionPermission.READ -class ActionObjectALLREAD(ActionObjectPermission): - def __init__(self, uid: UID, credentials: SyftVerifyKey): - self.uid = uid - self.credentials = credentials - self.permission = ActionPermission.ALL_READ - - class ActionObjectWRITE(ActionObjectPermission): def __init__(self, uid: UID, credentials: SyftVerifyKey): self.uid = uid diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index d7dd37fa0ea..fc147883010 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -1,6 +1,5 @@ # stdlib import importlib -import sys from typing import Any # third party @@ -103,12 +102,12 @@ def _set( if isinstance(action_object, ActionObject): action_object.syft_created_at = DateTime.now() - action_object._clear_cache(clear_reprs=True) + action_object._clear_cache() else: action_object.private_obj.syft_created_at = DateTime.now() # type: ignore[unreachable] action_object.mock_obj.syft_created_at = DateTime.now() - action_object.private_obj._clear_cache(clear_reprs=True) - action_object.mock._clear_cache(clear_reprs=True) + action_object.private_obj._clear_cache() + action_object.mock._clear_cache() # If either context or argument is True, has_result_read_permission is True has_result_read_permission = ( @@ -116,7 +115,6 @@ def _set( or has_result_read_permission ) - print(f"{has_result_read_permission=}", file=sys.stderr) result = self.store.set( uid=action_object.id, credentials=context.credentials, @@ -667,7 +665,6 @@ def execute( if action.action_type == ActionType.CREATEOBJECT: result_action_object = Ok(action.create_object) - # print(action.create_object, "already in blob storage") elif action.action_type == ActionType.SYFTFUNCTION: usercode_service = context.node.get_service("usercodeservice") kwarg_ids = {} diff --git a/packages/syft/src/syft/service/action/action_store.py b/packages/syft/src/syft/service/action/action_store.py index 
faea424a03b..001aa7a4e0f 100644 --- a/packages/syft/src/syft/service/action/action_store.py +++ b/packages/syft/src/syft/service/action/action_store.py @@ -22,7 +22,7 @@ from ...types.uid import UID from ..response import SyftSuccess from .action_object import is_action_data_empty -from .action_permissions import ActionObjectALLREAD, ActionObjectEXECUTE +from .action_permissions import ActionObjectEXECUTE from .action_permissions import ActionObjectOWNER from .action_permissions import ActionObjectPermission from .action_permissions import ActionObjectREAD From 803790fc899c8aa4bfe4cdbab5356a3683c321f9 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 13 Jun 2024 15:21:37 +0200 Subject: [PATCH 127/313] fix tests --- .../syft/src/syft/service/code/user_code.py | 4 + .../syft/service/code/user_code_service.py | 6 +- .../src/syft/service/output/output_service.py | 12 +- .../syft/src/syft/service/request/request.py | 6 +- packages/syft/src/syft/types/syft_object.py | 9 ++ .../service/sync/sync_resolve_single_test.py | 120 ++++++++++++++++-- 6 files changed, 133 insertions(+), 24 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 8c636fc4da1..482a82a70dd 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -344,10 +344,12 @@ class UserCode(SyncableSyftObject): "input_owners", "code_status", "worker_pool_name", + "l0_deny_reason", ] __exclude_sync_diff_attrs__: ClassVar[list[str]] = [ "node_uid", + "code_status", "input_policy_type", "input_policy_init_kwargs", "input_policy_state", @@ -429,6 +431,8 @@ def _status_from_output_history( context, self.id, self.user_verify_key ) + if isinstance(is_approved, SyftError): + return is_approved is_denied = self.l0_deny_reason is not None if is_denied: diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 50527102f2e..507225736de 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -91,6 +91,9 @@ def update( code_update: UserCodeUpdate, ) -> SyftSuccess | SyftError: code = self.stash.get_by_uid(context.credentials, code_update.id) + if code.is_err(): + return SyftError(message=code.err()) + code = code.ok() result = self.stash.update(context.credentials, code) if result.is_err(): @@ -521,7 +524,8 @@ def _call( return Err( "Execution denied: Your code is waiting for approval" ) - if has_side or not (is_valid := output_policy._is_valid(context)): + is_valid = output_policy._is_valid(context) # type: ignore + if has_side or not is_valid: if len(output_history) > 0 and not skip_read_cache: last_executed_output = output_history[-1] # Check if the inputs of the last executed output match diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py index 4bafc24090c..30f04b50c42 100644 --- a/packages/syft/src/syft/service/output/output_service.py +++ b/packages/syft/src/syft/service/output/output_service.py @@ -307,26 +307,24 @@ def has_output_read_permissions( self, context: AuthedServiceContext, user_code_id: UID, - code_owner_verify_key: SyftVerifyKey, - ) -> bool: + user_verify_key: SyftVerifyKey, + ) -> bool | SyftError: action_service = context.node.get_service("actionservice") all_outputs = self.get_by_user_code_id(context, user_code_id) if isinstance(all_outputs, SyftError): - 
print(all_outputs.message) - return False - print("OUTPUTS", all_outputs) + return all_outputs for output in all_outputs: # TODO tech debt: unclear why code owner can see outputhistory without permissions. # It is not a security issue (output history has no data) it is confusing for user # if not self.stash.has_permission( - # ActionObjectREAD(uid=output.id, credentials=code_owner_verify_key) + # ActionObjectREAD(uid=output.id, credentials=user_verify_key) # ): # continue # Check if all output ActionObjects have permissions result_ids = output.output_id_list permissions = [ - ActionObjectREAD(uid=_id, credentials=code_owner_verify_key) + ActionObjectREAD(uid=_id, credentials=user_verify_key) for _id in result_ids ] if action_service.store.has_permissions(permissions): diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 910eaeb9890..2cf05084ae6 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -653,7 +653,7 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: return api if self.code.is_low_side: - result = api.code.update(l0_deny_reason=reason) + result = api.code.update(id=self.code_id, l0_deny_reason=reason) if isinstance(result, SyftError): return result return SyftSuccess(message=f"Request denied with reason: {reason}") @@ -1331,5 +1331,5 @@ def _run( ) ) - def link(self) -> SyftObject | None: - return None + def link(self) -> Any: # type: ignore + return self.code.status diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 4b9f5b6711b..58977d27be1 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -680,6 +680,15 @@ def syft_get_diffs(self, ext_obj: Self) -> list["AttrDiff"]: obj_attr = getattr(self, attr) ext_obj_attr = getattr(ext_obj, attr) + if (obj_attr is None) ^ (ext_obj_attr is None): + # If either attr is None, but not both, we have a diff + # NOTE This clause is needed because attr.__eq__ is not implemented for None, and will eval to True + diff_attr = AttrDiff( + attr_name=attr, + low_attr=obj_attr, + high_attr=ext_obj_attr, + ) + diff_attrs.append(diff_attr) if isinstance(obj_attr, list) and isinstance(ext_obj_attr, list): list_diff = ListDiff.from_lists( attr_name=attr, low_list=obj_attr, high_list=ext_obj_attr diff --git a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py index bced841db4a..adc6346fd10 100644 --- a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py +++ b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py @@ -9,6 +9,8 @@ from syft.client.syncing import compare_clients from syft.client.syncing import resolve from syft.service.code.user_code import UserCode +from syft.service.job.job_stash import Job +from syft.service.request.request import RequestStatus from syft.service.response import SyftError from syft.service.response import SyftSuccess from syft.service.sync.resolve_widget import ResolveWidget @@ -22,6 +24,9 @@ def handle_decision( return widget.obj_diff_batch.ignore() elif decision in [SyncDecision.LOW, SyncDecision.HIGH]: return widget.click_sync() + elif decision == SyncDecision.SKIP: + # Skip is no-op + return SyftSuccess(message="skipped") else: raise ValueError(f"Unknown decision {decision}") @@ -32,6 +37,7 @@ def compare_and_resolve( to_client: DomainClient, decision: 
SyncDecision = SyncDecision.LOW, decision_callback: callable = None, + share_private_data: bool = True, ): diff_state_before = compare_clients(from_client, to_client) for obj_diff_batch in diff_state_before.active_batches: @@ -40,7 +46,8 @@ def compare_and_resolve( ) if decision_callback: decision = decision_callback(obj_diff_batch) - widget.click_share_all_private_data() + if share_private_data: + widget.click_share_all_private_data() res = handle_decision(widget, decision) assert isinstance(res, SyftSuccess) from_client.refresh() @@ -49,10 +56,10 @@ def compare_and_resolve( return diff_state_before, diff_state_after -def run_and_accept_result(client): - job_high = client.code.compute(blocking=True) - client.requests[0].accept_by_depositing_result(job_high) - return job_high +def run_and_deposit_result(client): + result = client.code.compute(blocking=True) + job = client.requests[0].deposit_result(result) + return job @syft.syft_function_single_use() @@ -89,7 +96,7 @@ def compute() -> int: assert diff_state_after.is_same - run_and_accept_result(high_client) + run_and_deposit_result(high_client) diff_state_before, diff_state_after = compare_and_resolve( from_client=high_client, to_client=low_client ) @@ -126,7 +133,7 @@ def compute() -> int: assert diff_state_after.is_same - run_and_accept_result(high_client) + run_and_deposit_result(high_client) diff_state_before, diff_state_after = compare_and_resolve( from_client=high_client, to_client=low_client ) @@ -190,15 +197,12 @@ def compute() -> int: from_client=low_client, to_client=high_client ) - run_and_accept_result(high_client) + run_and_deposit_result(high_client) def skip_if_user_code(diff): if diff.root_type is UserCode: return SyncDecision.IGNORE - - raise ValueError( - f"Should not reach here after ignoring user code, got {diff.root_type}" - ) + return SyncDecision.SKIP diff_before, diff_after = compare_and_resolve( from_client=low_client, @@ -206,7 +210,7 @@ def skip_if_user_code(diff): decision_callback=skip_if_user_code, ) assert not diff_before.is_same - assert len(diff_after.batches) == 0 + assert len(diff_after.batches) == 1 def test_request_code_execution_multiple(low_worker, high_worker): @@ -244,3 +248,93 @@ def compute_thrice() -> int: assert not diff_before.is_same assert diff_after.is_same + + +def test_approve_request_on_sync_blocking(low_worker, high_worker): + low_client = low_worker.root_client + client_low_ds = get_ds_client(low_client) + high_client = high_worker.root_client + + @sy.syft_function_single_use() + def compute() -> int: + return 42 + + _ = client_low_ds.code.request_code_execution(compute) + + # No execute permissions + result_error = client_low_ds.code.compute(blocking=True) + assert isinstance(result_error, SyftError) + assert low_client.requests[0].status == RequestStatus.PENDING + + # Sync request to high side + diff_before, diff_after = compare_and_resolve( + from_client=low_client, to_client=high_client + ) + + assert not diff_before.is_same + assert diff_after.is_same + + # Execute on high side + job = run_and_deposit_result(high_client) + assert job.result.get() == 42 + + assert high_client.requests[0].status == RequestStatus.PENDING + + # Sync back to low side, share private data + diff_before, diff_after = compare_and_resolve( + from_client=high_client, to_client=low_client, share_private_data=True + ) + assert len(diff_before.batches) == 1 and diff_before.batches[0].root_type is Job + assert low_client.requests[0].status == RequestStatus.APPROVED + + assert client_low_ds.code.compute().get() 
== 42 + assert len(client_low_ds.code.compute.jobs) == 1 + # check if user retrieved from cache, instead of re-executing + assert len(client_low_ds.requests[0].code.output_history) == 1 + + +def test_deny_and_sync(low_worker, high_worker): + low_client = low_worker.root_client + client_low_ds = get_ds_client(low_client) + high_client = high_worker.root_client + + @sy.syft_function_single_use() + def compute() -> int: + return 42 + + _ = client_low_ds.code.request_code_execution(compute) + + # No execute permissions + result_error = client_low_ds.code.compute(blocking=True) + assert isinstance(result_error, SyftError) + assert low_client.requests[0].status == RequestStatus.PENDING + + # Deny on low side + request_low = low_client.requests[0] + res = request_low.deny(reason="bad request") + print(res) + assert low_client.requests[0].status == RequestStatus.REJECTED + + # Un-deny. NOTE: not supported by current UX, this is just used to re-deny on high side + low_client.api.code.update(id=request_low.code_id, l0_deny_reason=None) + assert low_client.requests[0].status == RequestStatus.PENDING + + # Sync request to high side + diff_before, diff_after = compare_and_resolve( + from_client=low_client, to_client=high_client + ) + + assert not diff_before.is_same + assert diff_after.is_same + + # Deny on high side + high_client.requests[0].deny(reason="bad request") + assert high_client.requests[0].status == RequestStatus.REJECTED + + diff_before, diff_after = compare_and_resolve( + from_client=high_client, to_client=low_client + ) + + assert diff_after.is_same + + assert low_client.requests[0].status == RequestStatus.REJECTED From c6d3b07ce03951e7f342f64421b76cdcf394538b Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 13 Jun 2024 15:45:10 +0200 Subject: [PATCH 128/313] fix lint --- packages/syft/src/syft/service/code/user_code.py | 2 +- packages/syft/src/syft/service/code/user_code_service.py | 2 +- packages/syft/src/syft/service/request/request.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 8f0c93d04f8..d87363a0020 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -874,7 +874,7 @@ class UserCodeUpdate(PartialSyftObject): __canonical_name__ = "UserCodeUpdate" __version__ = SYFT_OBJECT_VERSION_1 - l0_deny_reason: str | None = None + l0_deny_reason: str | None @serializable(without=["local_function"]) diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 507225736de..c5079e373e0 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -99,7 +99,7 @@ def update( if result.is_err(): return SyftError(message=str(result.err())) - if code_update.l0_deny_reason is not Empty: + if code_update.l0_deny_reason is not Empty: # type: ignore[comparison-overlap] code.l0_deny_reason = code_update.l0_deny_reason result = self.stash.update(context.credentials, code) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index cb5cd4b660b..2778e747225 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -751,8 +751,8 @@ def _create_action_object_for_deposited_result( else: action_object = ActionObject.from_obj( result, - 
syft_client_verify_key=api.signing_key.verify_key, - syft_node_location=api.node_uid, + syft_client_verify_key=self.syft_client_verify_key, + syft_node_location=self.syft_node_location, ) # Ensure ActionObject exists on this node @@ -1306,7 +1306,7 @@ def link(self) -> SyftObject | None: class SyncedUserCodeStatusChange(UserCodeStatusChange): __canonical_name__ = "SyncedUserCodeStatusChange" __version__ = SYFT_OBJECT_VERSION_3 - linked_obj: LinkedObject | None = None + linked_obj: LinkedObject | None = None # type: ignore @property def approved(self) -> bool: From 46b1c049ada9bac6cca711e086a374880afe72f3 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 13 Jun 2024 19:01:22 +0200 Subject: [PATCH 129/313] change sync dep direction --- packages/syft/src/syft/client/syncing.py | 2 +- packages/syft/src/syft/service/code/user_code.py | 11 ++++++++--- packages/syft/src/syft/service/request/request.py | 2 -- packages/syft/src/syft/service/sync/diff_state.py | 2 +- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index 25dea439e8f..9f2cfafd85e 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -143,7 +143,7 @@ def handle_sync_batch( obj_diff_batch.decision = decision sync_instructions = [] - for diff in obj_diff_batch.get_dependents(include_roots=True): + for diff in obj_diff_batch.get_dependencies(include_roots=True): # figure out the right verify key to share to # in case of a job with user code, share to user code owner # without user code, share to job owner diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index d87363a0020..b5749b6b591 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -125,6 +125,11 @@ class UserCodeStatusCollection(SyncableSyftObject): __repr_attrs__ = ["approved", "status_dict"] + # if len(output_history): {uid: approved}, + # if denied string is somewhere: {uid: denied} + # else: {uid: pending} + # - the object is completely different for l2/l0 + # - the interface is different (because we need context in backend to get output_history) status_dict: dict[NodeIdentity, tuple[UserCodeStatus, str]] = {} user_code_link: LinkedObject @@ -411,7 +416,7 @@ def user(self) -> UserView | SyftError: ) return api.services.user.get_by_verify_key(self.user_verify_key) - def _status_from_output_history( + def _compute_status_from_output_history( self, context: AuthedServiceContext | None = None ) -> UserCodeStatusCollection | SyftError: if context is None: @@ -458,7 +463,7 @@ def status(self) -> UserCodeStatusCollection | SyftError: return SyftError( message="Encountered a low side UserCode object with a status_link." ) - return self._status_from_output_history() + return self._compute_status_from_output_history() if self.status_link is None: return SyftError( @@ -475,7 +480,7 @@ def get_status( return SyftError( message="Encountered a low side UserCode object with a status_link." ) - return self._status_from_output_history(context) + return self._compute_status_from_output_history(context) if self.status_link is None: return SyftError( message="This UserCode does not have a status. Please contact the Admin." 
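
The low-side status rules sketched in the comments above reduce to a small decision function. Below is a minimal, self-contained sketch of that derivation, with illustrative names only: the real _compute_status_from_output_history also verifies read permissions on the outputs via the output service and wraps the result in a UserCodeStatusCollection.

from enum import Enum


class UserCodeStatus(Enum):
    PENDING = "pending"
    DENIED = "denied"
    APPROVED = "approved"


def compute_l0_status(
    output_history: list, l0_deny_reason: str | None
) -> UserCodeStatus:
    # A recorded deny reason takes precedence, mirroring the is_denied check.
    if l0_deny_reason is not None:
        return UserCodeStatus.DENIED
    # Any synced execution output implies the request was approved.
    if len(output_history) > 0:
        return UserCodeStatus.APPROVED
    # No outputs and no denial: the request is still pending.
    return UserCodeStatus.PENDING

Deriving the status this way, instead of storing it, is what lets the low side flip a request to approved purely by syncing back a Job with its output history, as the sync tests later in this series exercise.
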
diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 2778e747225..4f6ab2c5ad5 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -866,11 +866,9 @@ def get_sync_dependencies( self, context: AuthedServiceContext ) -> list[UID] | SyftError: dependencies = [] - code_id = self.code_id if isinstance(code_id, SyftError): return code_id - dependencies.append(code_id) return dependencies diff --git a/packages/syft/src/syft/service/sync/diff_state.py b/packages/syft/src/syft/service/sync/diff_state.py index 8781435e220..bb3840cb0bf 100644 --- a/packages/syft/src/syft/service/sync/diff_state.py +++ b/packages/syft/src/syft/service/sync/diff_state.py @@ -817,7 +817,7 @@ def _build_hierarchy_helper( global_diffs=obj_uid_to_diff, global_roots=root_ids, hierarchy_levels=levels, - dependencies=batch_dependencies, + dependencies=obj_dependencies, root_diff=obj_uid_to_diff[root_uid], low_node_uid=low_node_uid, high_node_uid=high_node_uid, From 417adf112e033338351ee69077a94d9f69a376ae Mon Sep 17 00:00:00 2001 From: Spencer Koch Date: Thu, 13 Jun 2024 19:38:49 -0500 Subject: [PATCH 130/313] add existingSecret check for SMTP password --- .../helm/syft/templates/backend/backend-statefulset.yaml | 7 +++++++ packages/grid/helm/syft/values.yaml | 4 +++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml index 106d2fee893..be0a35d6245 100644 --- a/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml +++ b/packages/grid/helm/syft/templates/backend/backend-statefulset.yaml @@ -109,7 +109,14 @@ spec: - name: SMTP_USERNAME value: {{ .Values.node.smtp.username | quote }} - name: SMTP_PASSWORD + {{- if .Values.node.smtp.existingSecret }} + valueFrom: + secretKeyRef: + name: {{ .Values.node.smtp.existingSecret }} + key: smtpPassword + {{ else }} value: {{ .Values.node.smtp.password | quote }} + {{ end }} - name: EMAIL_SENDER value: {{ .Values.node.smtp.from | quote}} # SeaweedFS diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index 2644eac26e4..631475b2462 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -175,6 +175,8 @@ node: # SMTP Settings smtp: + # Existing secret for SMTP with key 'smtpPassword' + existingSecret: null host: smtp.sendgrid.net port: 587 from: noreply@openmined.org @@ -195,7 +197,7 @@ node: resourcesPreset: xlarge resources: null - # Seaweed secret name. Override this if you want to use a self-managed secret. + # Backend secret name. Override this if you want to use a self-managed secret. 
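# A hypothetical override showing how the new existingSecret flow above is
# meant to be used. The secret name "my-smtp-secret" is illustrative; the key
# must be "smtpPassword" to match the secretKeyRef added to
# backend-statefulset.yaml in this patch:
#
#   kubectl create secret generic my-smtp-secret \
#     --from-literal=smtpPassword='<smtp-password>'
#
#   node:
#     smtp:
#       existingSecret: "my-smtp-secret"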
  # Secret must contain the following keys:
  # - defaultRootPassword
  secretKeyName: backend-secret

From c0f8014f0ade03de0f86534a817c4a3f172ca645 Mon Sep 17 00:00:00 2001
From: teo
Date: Fri, 14 Jun 2024 10:59:46 +0300
Subject: [PATCH 131/313] fixed blob storage permissions for mock

---
 .../syft/src/syft/service/action/action_object.py  |  2 +-
 .../syft/src/syft/service/action/action_service.py | 11 ++++++++++-
 packages/syft/src/syft/store/kv_document_store.py  |  2 +-
 3 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py
index 01bd2f2c7d0..f1f84141ddb 100644
--- a/packages/syft/src/syft/service/action/action_object.py
+++ b/packages/syft/src/syft/service/action/action_object.py
@@ -666,7 +666,7 @@ def debox_args_and_kwargs(args: Any, kwargs: Any) -> tuple[Any, Any]:


 def truncate_str(string, length = 100):
     if len(string) > length:
-        repr_data = repr_data[:length] + '... data too long, truncated to 100 characters'
+        string = string[:length] + '... data too long, truncated to 100 characters'
     return string

diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py
index fc147883010..22ff9d07c42 100644
--- a/packages/syft/src/syft/service/action/action_service.py
+++ b/packages/syft/src/syft/service/action/action_service.py
@@ -1,5 +1,6 @@
 # stdlib
 import importlib
+import sys
 from typing import Any

 # third party
@@ -107,7 +108,7 @@ def _set(
             action_object.private_obj.syft_created_at = DateTime.now()  # type: ignore[unreachable]
             action_object.mock_obj.syft_created_at = DateTime.now()
             action_object.private_obj._clear_cache()
-            action_object.mock._clear_cache()
+            action_object.mock_obj._clear_cache()

         # If either context or argument is True, has_result_read_permission is True
         has_result_read_permission = (
@@ -124,6 +125,14 @@ def _set(
         )
         if result.is_ok():
             if isinstance(action_object, TwinObject):
+                if action_object.mock_obj.syft_blob_storage_entry_id is not None:
+                    print(action_object.mock_obj.syft_blob_storage_entry_id, file=sys.stderr)
+                    blob_id = action_object.mock_obj.syft_blob_storage_entry_id
+                    permission = ActionObjectPermission(blob_id, ActionPermission.ALL_READ)
+                    blob_storage_service: AbstractService = context.node.get_service(
+                        BlobStorageService
+                    )
+                    blob_storage_service.stash.add_permission(permission)
                 if has_result_read_permission:
                     action_object = action_object.private
                 else:
diff --git a/packages/syft/src/syft/store/kv_document_store.py b/packages/syft/src/syft/store/kv_document_store.py
index 02faf120eaa..b594be92775 100644
--- a/packages/syft/src/syft/store/kv_document_store.py
+++ b/packages/syft/src/syft/store/kv_document_store.py
@@ -289,7 +289,7 @@ def has_permission(self, permission: ActionObjectPermission) -> bool:
                 pass
             elif (
                 permission.permission == ActionPermission.READ
-                or ActionObjectPermission(
+                and ActionObjectPermission(
                     permission.uid, ActionPermission.ALL_READ
                 ).permission_string
                 in self.permissions[permission.uid]

From 5e8769c91553347052bd8c7e6cb3e2288babd63d Mon Sep 17 00:00:00 2001
From: Aziz Berkay Yesilyurt
Date: Fri, 14 Jun 2024 12:07:19 +0300
Subject: [PATCH 132/313] check is_l0_setup for caching results

---
 packages/syft/src/syft/service/code/user_code_service.py | 4 ++--
 packages/syft/tests/syft/users/user_code_test.py         | 2 ++
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/packages/syft/src/syft/service/code/user_code_service.py
b/packages/syft/src/syft/service/code/user_code_service.py index c5079e373e0..121a5e2820c 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -588,8 +588,8 @@ def _call( # this currently only works for nested syft_functions # and admins executing on high side (TODO, decide if we want to increment counter) - - if not skip_fill_cache and output_policy is not None or is_high_side: + is_l0_setup = context.role == ServiceRole.ADMIN and is_high_side + if not skip_fill_cache and output_policy is not None or is_l0_setup: res = code.store_execution_output( context=context, outputs=result, diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py index 22de6447587..f006525097e 100644 --- a/packages/syft/tests/syft/users/user_code_test.py +++ b/packages/syft/tests/syft/users/user_code_test.py @@ -317,6 +317,7 @@ def compute_sum(): # not approved, mock execution users[-1].allow_mock_execution() result = ds_client.api.services.code.compute_sum() + assert result, result assert result.get() == 1 # approved, no mock execution @@ -328,4 +329,5 @@ def compute_sum(): request.approve() result = ds_client.api.services.code.compute_sum() + assert result, result assert result.get() == 1 From 1e8f6247d1a3e198a690f4bd09cfee9718ed163d Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 14 Jun 2024 12:21:36 +0300 Subject: [PATCH 133/313] add _get_api to passthrough_attrs --- packages/syft/src/syft/service/action/action_object.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 205a6c8633c..f58de720cc5 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -247,6 +247,7 @@ class ActionObjectPointer: "__repr_str__", # pydantic "__repr_args__", # pydantic "__post_init__", # syft + "_get_api", # syft "__validate_private_attrs__", # syft "id", # syft "to_mongo", # syft 🟡 TODO 23: Add composeable / inheritable object passthrough attrs From fc9a11c489cd239c6398c554a71c73d1206786db Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Fri, 14 Jun 2024 12:06:12 +0200 Subject: [PATCH 134/313] isolate set repr --- .../src/syft/service/action/action_object.py | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 92412e30c47..5d501d0d92d 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -319,6 +319,7 @@ class ActionObjectPointer: "syft_eq", # syft "__table_coll_widths__", "_clear_cache", + "_set_reprs", ] dont_wrap_output_attrs = [ "__repr__", @@ -343,6 +344,7 @@ class ActionObjectPointer: "syft_eq", # syft "__table_coll_widths__", "_clear_cache", + "_set_reprs", ] dont_make_side_effects = [ "__repr_attrs__", @@ -365,6 +367,7 @@ class ActionObjectPointer: "syft_eq", # syft "__table_coll_widths__", "_clear_cache", + "_set_reprs", ] action_data_empty_must_run = [ "__repr__", @@ -662,12 +665,15 @@ def debox_args_and_kwargs(args: Any, kwargs: Any) -> tuple[Any, Any]: "_data_repr", "syft_eq", "__table_coll_widths__", + "_set_reprs", ] def truncate_str(string: str, length: int = 100) -> str: - if len(string) > length: - string = string[:length] + "... 
data too long, truncated to 100 characters" + stringlen = len(string) + if len(stringlen) > length: + n_hidden = stringlen - length + string = f"{string[:length]}... ({n_hidden} characters hidden)" return string @@ -838,16 +844,7 @@ def _save_to_blob_storage_(self, data: Any) -> SyftError | None: print("cannot save to blob storage") self.syft_action_data_type = type(data) - - if inspect.isclass(data): - self.syft_action_data_repr_ = truncate_str(repr_cls(data)) - else: - self.syft_action_data_repr_ = truncate_str( - data._repr_markdown_() - if hasattr(data, "_repr_markdown_") - else data.__repr__() - ) - self.syft_action_data_str_ = truncate_str(str(data)) + self._set_reprs(data) self.syft_has_bool_attr = hasattr(data, "__bool__") else: debug("skipping writing action object to store, passed data was empty.") @@ -856,6 +853,17 @@ def _save_to_blob_storage_(self, data: Any) -> SyftError | None: return None + def _set_reprs(self, data: any) -> None: + if inspect.isclass(data): + self.syft_action_data_repr_ = truncate_str(repr_cls(data)) + else: + self.syft_action_data_repr_ = truncate_str( + data._repr_markdown_() + if hasattr(data, "_repr_markdown_") + else data.__repr__() + ) + self.syft_action_data_str_ = truncate_str(str(data)) + def _save_to_blob_storage(self) -> SyftError | None: data = self.syft_action_data if isinstance(data, SyftError): From 596b426ce9bfa735ac546d45a0e26c001fc3fd87 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Fri, 14 Jun 2024 12:09:55 +0200 Subject: [PATCH 135/313] fix truncate --- packages/syft/src/syft/service/action/action_object.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 5d501d0d92d..38737485477 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -671,7 +671,7 @@ def debox_args_and_kwargs(args: Any, kwargs: Any) -> tuple[Any, Any]: def truncate_str(string: str, length: int = 100) -> str: stringlen = len(string) - if len(stringlen) > length: + if stringlen > length: n_hidden = stringlen - length string = f"{string[:length]}... 
({n_hidden} characters hidden)" return string From 9da4c871db9216a8517382f4475a1c3e0da8e909 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Fri, 14 Jun 2024 12:16:38 +0200 Subject: [PATCH 136/313] passthrough clear cache --- packages/syft/src/syft/service/action/action_object.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 38737485477..f769ff5c6e0 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -665,6 +665,7 @@ def debox_args_and_kwargs(args: Any, kwargs: Any) -> tuple[Any, Any]: "_data_repr", "syft_eq", "__table_coll_widths__", + "_clear_cache", "_set_reprs", ] From efa68f780a565c2ff7c6f500949f16f41e17e5be Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 14 Jun 2024 14:14:31 +0300 Subject: [PATCH 137/313] improved l0 check --- .../syft/src/syft/service/code/user_code.py | 12 ++++++++--- .../syft/service/code/user_code_service.py | 20 +++++++++++-------- packages/syft/src/syft/types/syft_object.py | 2 +- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index d87363a0020..cacf1aa3db3 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -285,7 +285,8 @@ class UserCodeV4(SyncableSyftObject): input_kwargs: list[str] enclave_metadata: EnclaveMetadata | None = None submit_time: DateTime | None = None - uses_domain: bool = False # tracks if the code calls domain.something, variable is set during parsing + # tracks if the code calls domain.something, variable is set during parsing + uses_domain: bool = False nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {} worker_pool_name: str | None = None @@ -316,7 +317,9 @@ class UserCode(SyncableSyftObject): input_kwargs: list[str] enclave_metadata: EnclaveMetadata | None = None submit_time: DateTime | None = None - uses_domain: bool = False # tracks if the code calls domain.something, variable is set during parsing + # tracks if the code calls domain.something, variable is set during parsing + uses_domain: bool = False + nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {} worker_pool_name: str | None = None origin_node_side_type: NodeSideType @@ -985,7 +988,10 @@ def _ephemeral_node_call( n_consumers=n_consumers, deploy_to="python", ) - ep_client = ep_node.login(email="info@openmined.org", password="changethis") # nosec + ep_client = ep_node.login( + email="info@openmined.org", + password="changethis", + ) # nosec self.input_policy_init_kwargs = cast(dict, self.input_policy_init_kwargs) for node_id, obj_dict in self.input_policy_init_kwargs.items(): # api = APIRegistry.api_for( diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 121a5e2820c..b6970d31a03 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -503,10 +503,7 @@ def _call( input_policy = code.get_input_policy(context) # relative - from ...node.node import get_node_side_type - is_high_side = get_node_side_type() == "high" - has_side = get_node_side_type() is not None # Check output policy if not override_execution_permission: output_history = code.get_output_history(context=context) @@ -520,12 +517,18 @@ def 
_call( output_policy=output_policy, ) if not can_execute: - if not has_side and not code.is_output_policy_approved(context): + # We don't check output policy if + # code is from low side (L0 setup) + if not code.is_low_side and not code.is_output_policy_approved( + context + ): return Err( "Execution denied: Your code is waiting for approval" ) - is_valid = output_policy._is_valid(context) # type: ignore - if has_side or not is_valid: + is_valid = ( + output_policy._is_valid(context) if output_policy else False + ) + if not is_valid or code.is_low_side: if len(output_history) > 0 and not skip_read_cache: last_executed_output = output_history[-1] # Check if the inputs of the last executed output match @@ -588,8 +591,9 @@ def _call( # this currently only works for nested syft_functions # and admins executing on high side (TODO, decide if we want to increment counter) - is_l0_setup = context.role == ServiceRole.ADMIN and is_high_side - if not skip_fill_cache and output_policy is not None or is_l0_setup: + # always store_execution_output on l0 setup + is_l0_request = context.role == ServiceRole.ADMIN and code.is_low_side + if not skip_fill_cache and output_policy is not None or is_l0_request: res = code.store_execution_output( context=context, outputs=result, diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 58977d27be1..e4daf3a779f 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -37,6 +37,7 @@ from ..node.credentials import SyftVerifyKey from ..serde.recursive_primitives import recursive_serde_register_type from ..serde.serialize import _serialize as serialize +from ..service.response import SyftError from ..util.autoreload import autoreload_enabled from ..util.markdown import as_markdown_python_code from ..util.notebook_ui.components.tabulator_template import build_tabulator_table @@ -50,7 +51,6 @@ if TYPE_CHECKING: # relative from ..client.api import SyftAPI - from ..service.response import SyftError from ..service.sync.diff_state import AttrDiff IntStr = int | str From f856c42eaca02573502830651be4aaef9fd78fa1 Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 14 Jun 2024 14:16:45 +0300 Subject: [PATCH 138/313] added blob entry id check on clear cache --- packages/syft/src/syft/service/action/action_object.py | 3 ++- packages/syft/src/syft/service/action/action_permissions.py | 1 + packages/syft/src/syft/service/queue/zmq_queue.py | 3 ++- .../syft/tests/syft/request/request_code_accept_deny_test.py | 5 +++-- .../syft/tests/syft/service/action/action_service_test.py | 2 ++ 5 files changed, 10 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index f769ff5c6e0..c9d24e31159 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -879,7 +879,8 @@ def _save_to_blob_storage(self) -> SyftError | None: return None def _clear_cache(self) -> None: - self.syft_action_data_cache = self.as_empty_data() + if self.syft_blob_storage_entry_id is not None: + self.syft_action_data_cache = self.as_empty_data() @property def is_pointer(self) -> bool: diff --git a/packages/syft/src/syft/service/action/action_permissions.py b/packages/syft/src/syft/service/action/action_permissions.py index df4b4344045..2fda8bee2ef 100644 --- a/packages/syft/src/syft/service/action/action_permissions.py +++ 
b/packages/syft/src/syft/service/action/action_permissions.py @@ -79,6 +79,7 @@ def __init__(self, uid: UID, credentials: SyftVerifyKey): self.credentials = credentials self.permission = ActionPermission.READ + class ActionObjectWRITE(ActionObjectPermission): def __init__(self, uid: UID, credentials: SyftVerifyKey): self.uid = uid diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index 0309f9b3f9d..decc3fd2e79 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -286,7 +286,7 @@ def unwrap_collection(col: set | dict | list) -> [Any]: # type: ignore return True return False - def preprocess_action_arg(self, arg: UID) -> None: + def preprocess_action_arg(self, arg: UID) -> UID | None: """ "If the argument is a collection (of collections) of ActionObjects, We want to flatten the collection and upload a new ActionObject that contains its values. E.g. [[ActionObject1, ActionObject2],[ActionObject3, ActionObject4]] @@ -308,6 +308,7 @@ def preprocess_action_arg(self, arg: UID) -> None: res = self.action_service.set( context=self.auth_context, action_object=new_action_object ) + return None def read_items(self) -> None: while True: diff --git a/packages/syft/tests/syft/request/request_code_accept_deny_test.py b/packages/syft/tests/syft/request/request_code_accept_deny_test.py index e84f0360b12..4680c39fe63 100644 --- a/packages/syft/tests/syft/request/request_code_accept_deny_test.py +++ b/packages/syft/tests/syft/request/request_code_accept_deny_test.py @@ -81,8 +81,9 @@ def test_action_store_change(faker: Faker, worker: Worker): dummy_data = [1, 2, 3] data = ActionObject.from_obj(dummy_data) action_obj = root_client.api.services.action.set(data) - - assert action_obj.get() == dummy_data + print(action_obj) + action_obj.reload_cache() + assert action_obj.syft_action_data == dummy_data ds_client = get_ds_client(faker, root_client, worker.guest_client) diff --git a/packages/syft/tests/syft/service/action/action_service_test.py b/packages/syft/tests/syft/service/action/action_service_test.py index e4d9b663500..f49c989d9fe 100644 --- a/packages/syft/tests/syft/service/action/action_service_test.py +++ b/packages/syft/tests/syft/service/action/action_service_test.py @@ -19,6 +19,8 @@ def test_action_service_sanity(worker): obj = ActionObject.from_obj("abc") pointer = service.set(get_auth_ctx(worker), obj).ok() + print(pointer) + pointer = pointer.get() assert len(service.store.data) == 1 res = pointer.capitalize() From e74248e5358502c77afe70df8544f64778235b69 Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 14 Jun 2024 14:24:35 +0300 Subject: [PATCH 139/313] added ignore unreachable --- packages/syft/src/syft/service/job/job_stash.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 591b49d31a3..8faa3a3d90a 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -680,7 +680,7 @@ def wait( ]: return self.result if print_warning and self.result is not None: - result_obj = api.services.action.get( + result_obj = api.services.action.get( # type: ignore[unreachable] self.result.id, resolve_nested=False ) if result_obj.is_link and job_only: From 9fb0746619b85624d2c5a6576e9535825d0e331e Mon Sep 17 00:00:00 2001 From: teo Date: Fri, 14 Jun 2024 14:37:59 +0300 Subject: [PATCH 140/313] fix capitalize 
test --- packages/syft/tests/syft/service/action/action_service_test.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/syft/tests/syft/service/action/action_service_test.py b/packages/syft/tests/syft/service/action/action_service_test.py index f49c989d9fe..e4d9b663500 100644 --- a/packages/syft/tests/syft/service/action/action_service_test.py +++ b/packages/syft/tests/syft/service/action/action_service_test.py @@ -19,8 +19,6 @@ def test_action_service_sanity(worker): obj = ActionObject.from_obj("abc") pointer = service.set(get_auth_ctx(worker), obj).ok() - print(pointer) - pointer = pointer.get() assert len(service.store.data) == 1 res = pointer.capitalize() From 6eb9ab82350119291325be0c3d08c7a400992b44 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 14 Jun 2024 15:11:02 +0300 Subject: [PATCH 141/313] fix None case for code --- .../syft/src/syft/service/request/request.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 2778e747225..6bf53f28669 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -574,7 +574,7 @@ def icon(self) -> str: @property def status(self) -> RequestStatus: - if self.code.is_low_side: + if self.is_low_side: code_status = self.code.status return RequestStatus.from_usercode_status(code_status) @@ -601,7 +601,7 @@ def approve( if isinstance(api, SyftError): return api - if self.code.is_low_side: + if self.is_low_side: return SyftError( message="This request is a low-side request. Please sync your results to approve." ) @@ -652,7 +652,7 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: if isinstance(api, SyftError): return api - if self.code.is_low_side: + if self.is_low_side: result = api.code.update(id=self.code_id, l0_deny_reason=reason) if isinstance(result, SyftError): return result @@ -660,8 +660,12 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: return api.services.request.undo(uid=self.id, reason=reason) + @property + def is_low_side(self) -> bool: + return bool(self.code) and self.code.is_low_side + def approve_with_client(self, client: SyftClient) -> Result[SyftSuccess, SyftError]: - if self.code.is_low_side: + if self.is_low_side: return SyftError( message="This request is a low-side request. Please sync your results to approve." ) @@ -824,9 +828,10 @@ def deposit_result( if isinstance(code, SyftError): return code - if not self.code.is_low_side: + if not self.is_low_side: return SyftError( - message="deposit_result is only available for low side requests. Please use request.approve() instead." + message="deposit_result is only available for low side code requests. " + "Please use request.approve() instead." 
) # Create ActionObject From 1a6f68a147f0a03abde59800b3dcb639bae3fdc3 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Fri, 14 Jun 2024 14:40:12 +0200 Subject: [PATCH 142/313] fix tests --- .../src/syft/service/action/action_object.py | 6 +----- .../syft/src/syft/service/code/user_code.py | 2 +- packages/syft/src/syft/types/twin_object.py | 9 +++++++++ packages/syft/tests/syft/action_test.py | 2 +- packages/syft/tests/syft/eager_test.py | 19 ++++++++++--------- .../request/request_code_accept_deny_test.py | 2 +- .../syft/service/action/action_object_test.py | 4 ++-- .../service/action/action_service_test.py | 4 ++-- 8 files changed, 27 insertions(+), 21 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index f769ff5c6e0..2039513affc 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -751,11 +751,7 @@ def _set_obj_location_(self, node_uid: UID, credentials: SyftVerifyKey) -> None: @property def syft_action_data(self) -> Any: - if ( - self.syft_blob_storage_entry_id - and self.syft_created_at - and not TraceResultRegistry.current_thread_is_tracing() - ): + if self.syft_blob_storage_entry_id and self.syft_created_at: self.reload_cache() return self.syft_action_data_cache diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 832379fb869..640d49962b8 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -895,7 +895,7 @@ def _ephemeral_node_call( syft_node_location=node_id.node_id, syft_client_verify_key=node_id.verify_key, ) - res = ep_client.api.services.action.set(new_obj) + res = new_obj.send(ep_client) if isinstance(res, SyftError): return res diff --git a/packages/syft/src/syft/types/twin_object.py b/packages/syft/src/syft/types/twin_object.py index 2ab0a4780f3..3bfd47c3d71 100644 --- a/packages/syft/src/syft/types/twin_object.py +++ b/packages/syft/src/syft/types/twin_object.py @@ -11,6 +11,7 @@ from typing_extensions import Self # relative +from ..client.client import SyftClient from ..serde.serializable import serializable from ..service.action.action_object import ActionObject from ..service.action.action_object import TwinMode @@ -95,3 +96,11 @@ def _save_to_blob_storage(self) -> SyftError | None: if isinstance(mock_store_res, SyftError): return mock_store_res return self.private_obj._save_to_blob_storage() + + def send(self, client: SyftClient, add_storage_permission: bool = True) -> Any: + self._set_obj_location_(client.id, client.verify_key) + self._save_to_blob_storage() + res = client.api.services.action.set( + self, add_storage_permission=add_storage_permission + ) + return res diff --git a/packages/syft/tests/syft/action_test.py b/packages/syft/tests/syft/action_test.py index c28f5c31615..1f4dc0cc36b 100644 --- a/packages/syft/tests/syft/action_test.py +++ b/packages/syft/tests/syft/action_test.py @@ -17,7 +17,7 @@ def test_actionobject_method(worker): assert root_domain_client.settings.enable_eager_execution(enable=True) action_store = worker.get_service("actionservice").store obj = ActionObject.from_obj("abc") - pointer = root_domain_client.api.services.action.set(obj) + pointer = obj.send(root_domain_client) assert len(action_store.data) == 1 res = pointer.capitalize() assert len(action_store.data) == 2 diff --git a/packages/syft/tests/syft/eager_test.py 
b/packages/syft/tests/syft/eager_test.py index a597a79b2d6..1cbfd11e55f 100644 --- a/packages/syft/tests/syft/eager_test.py +++ b/packages/syft/tests/syft/eager_test.py @@ -20,11 +20,11 @@ def test_eager_permissions(worker, guest_client): mock_obj=np.array([[1, 1, 1], [1, 1, 1]]), ) - input_ptr = root_domain_client.api.services.action.set(input_obj) + input_ptr = input_obj.send(root_domain_client) pointer = guest_client.api.services.action.get_pointer(input_ptr.id) - input_ptr = root_domain_client.api.services.action.set(input_obj) + input_ptr = input_obj.send(root_domain_client) pointer = guest_client.api.services.action.get_pointer(input_ptr.id) @@ -53,8 +53,9 @@ def my_plan(x=np.array([[2, 2, 2], [2, 2, 2]])): # noqa: B008 mock_obj=np.array([[1, 1, 1], [1, 1, 1]]), ) - input_obj = root_domain_client.api.services.action.set(input_obj) - pointer = guest_client.api.services.action.get_pointer(input_obj.id) + input_ptr = input_obj.send(root_domain_client) + + pointer = guest_client.api.services.action.get_pointer(input_ptr.id) res_ptr = plan_ptr(x=pointer) # guest cannot access result @@ -117,7 +118,7 @@ def my_plan(x=np.array([1, 2, 3, 4, 5, 6])): # noqa: B008 private_obj=np.array([1, 2, 3, 4, 5, 6]), mock_obj=np.array([1, 1, 1, 1, 1, 1]) ) - _id = root_domain_client.api.services.action.set(input_obj).id + _id = input_obj.send(root_domain_client).id pointer = guest_client.api.services.action.get_pointer(_id) res_ptr = plan_ptr(x=pointer) @@ -142,7 +143,7 @@ def test_setattribute(worker, guest_client): assert private_data.dtype != np.int32 - obj_pointer = root_domain_client.api.services.action.set(obj) + obj_pointer = obj.send(root_domain_client) obj_pointer = guest_client.api.services.action.get_pointer(obj_pointer.id) original_id = obj_pointer.id @@ -177,7 +178,7 @@ def test_getattribute(worker, guest_client): mock_obj=np.array([[1, 1, 1], [1, 1, 1]]), ) - obj_pointer = root_domain_client.api.services.action.set(obj) + obj_pointer = obj.send(root_domain_client) obj_pointer = guest_client.api.services.action.get_pointer(obj_pointer.id) size_pointer = obj_pointer.size @@ -196,7 +197,7 @@ def test_eager_method(worker, guest_client): mock_obj=np.array([[1, 1, 1], [1, 1, 1]]), ) - obj_pointer = root_domain_client.api.services.action.set(obj) + obj_pointer = obj.send(root_domain_client) obj_pointer = guest_client.api.services.action.get_pointer(obj_pointer.id) flat_pointer = obj_pointer.flatten() @@ -219,7 +220,7 @@ def test_eager_dunder_method(worker, guest_client): mock_obj=np.array([[1, 1, 1], [1, 1, 1]]), ) - obj_pointer = root_domain_client.api.services.action.set(obj) + obj_pointer = obj.send(root_domain_client) obj_pointer = guest_client.api.services.action.get_pointer(obj_pointer.id) first_row_pointer = obj_pointer[0] diff --git a/packages/syft/tests/syft/request/request_code_accept_deny_test.py b/packages/syft/tests/syft/request/request_code_accept_deny_test.py index e84f0360b12..9e63ea592a8 100644 --- a/packages/syft/tests/syft/request/request_code_accept_deny_test.py +++ b/packages/syft/tests/syft/request/request_code_accept_deny_test.py @@ -80,7 +80,7 @@ def test_action_store_change(faker: Faker, worker: Worker): root_client = worker.root_client dummy_data = [1, 2, 3] data = ActionObject.from_obj(dummy_data) - action_obj = root_client.api.services.action.set(data) + action_obj = data.send(root_client) assert action_obj.get() == dummy_data diff --git a/packages/syft/tests/syft/service/action/action_object_test.py b/packages/syft/tests/syft/service/action/action_object_test.py index 
a595fdd0e8d..dd7351f78e4 100644 --- a/packages/syft/tests/syft/service/action/action_object_test.py +++ b/packages/syft/tests/syft/service/action/action_object_test.py @@ -33,8 +33,8 @@ def helper_make_action_obj(orig_obj: Any): def helper_make_action_pointers(worker, obj, *args, **kwargs): root_domain_client = worker.root_client - root_domain_client.api.services.action.set(obj) - obj_pointer = root_domain_client.api.services.action.get_pointer(obj.id) + res = obj.send(root_domain_client) + obj_pointer = root_domain_client.api.services.action.get_pointer(res.id) # The args and kwargs should automatically be pointerized by obj_pointer return obj_pointer, args, kwargs diff --git a/packages/syft/tests/syft/service/action/action_service_test.py b/packages/syft/tests/syft/service/action/action_service_test.py index e4d9b663500..0c262f839a0 100644 --- a/packages/syft/tests/syft/service/action/action_service_test.py +++ b/packages/syft/tests/syft/service/action/action_service_test.py @@ -15,10 +15,10 @@ def get_auth_ctx(worker): def test_action_service_sanity(worker): service = worker.get_service("actionservice") + root_domain_client = worker.root_client obj = ActionObject.from_obj("abc") - - pointer = service.set(get_auth_ctx(worker), obj).ok() + pointer = obj.send(root_domain_client) assert len(service.store.data) == 1 res = pointer.capitalize() From 8daa0dbf1538d6623c555d58e6a70eee336cf6ac Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Fri, 14 Jun 2024 14:51:37 +0200 Subject: [PATCH 143/313] remove faulty line --- packages/syft/src/syft/service/action/action_object.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index bdb3a4de933..2039513affc 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -875,8 +875,7 @@ def _save_to_blob_storage(self) -> SyftError | None: return None def _clear_cache(self) -> None: - if self.syft_blob_storage_entry_id is not None: - self.syft_action_data_cache = self.as_empty_data() + self.syft_action_data_cache = self.as_empty_data() @property def is_pointer(self) -> bool: From 973cca963ccc1299be81074bd855deb9ae1c263b Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Fri, 14 Jun 2024 15:36:21 +0200 Subject: [PATCH 144/313] fix bugs --- .../7ad602bd16c3479bb6058fa0864ac9f2.json | 278 ++++++++++++++++++ .../8b186d7594d0485b90f95f6c95e35a25.json | 278 ++++++++++++++++++ .../8e0f05e65fec4fe7bbd364ac930dfa74.json | 14 + .../cc424e329ef6476faf5d77d2851589ed.json | 278 ++++++++++++++++++ .../d06611a4a0de458d9257e04c975487e0.json | 278 ++++++++++++++++++ .../ed92a248b88e4484b6937cd58d4ad7c1.json | 0 .../f1c5b5baa491435fab432d198567ed7a.json | 278 ++++++++++++++++++ .../src/syft/service/action/action_service.py | 48 ++- .../syft/src/syft/service/code/user_code.py | 8 +- 9 files changed, 1445 insertions(+), 15 deletions(-) create mode 100644 packages/syft/src/syft/protocol/7ad602bd16c3479bb6058fa0864ac9f2.json create mode 100644 packages/syft/src/syft/protocol/8b186d7594d0485b90f95f6c95e35a25.json create mode 100644 packages/syft/src/syft/protocol/8e0f05e65fec4fe7bbd364ac930dfa74.json create mode 100644 packages/syft/src/syft/protocol/cc424e329ef6476faf5d77d2851589ed.json create mode 100644 packages/syft/src/syft/protocol/d06611a4a0de458d9257e04c975487e0.json create mode 100644 packages/syft/src/syft/protocol/ed92a248b88e4484b6937cd58d4ad7c1.json create 
mode 100644 packages/syft/src/syft/protocol/f1c5b5baa491435fab432d198567ed7a.json

[The seven protocol JSON files created above are generated protocol-version
snapshots: ed92a248b88e4484b6937cd58d4ad7c1.json is empty;
8e0f05e65fec4fe7bbd364ac930dfa74.json holds only the four release-name
entries (0.8.2 through 0.8.6); the remaining five (7ad602bd..., 8b186d75...,
cc424e32..., d06611a4..., f1c5b5ba...) are byte-identical copies of one
278-line "dev" object-version table (blob 375aa1af66b) recording add/remove
entries for NodeMetadata, SyftAPI, HTMLObject, NodeSettings(Update),
BlobRetrievalByURL, EnclaveMetadata, JobItem, ExecutionOutput, the TwinAPI
endpoint types, SyftLog, SyncState, NodePeer(Update), and related classes.
All seven are deleted again in PATCH 145; the repeated contents are omitted.]
"version": 1, + "hash": "d31b2edfb767401c810584baccd27e4f566181c3ef7706618a82eb25ae20ff6d", + "action": "add" + } + }, + "NodeMetadataUpdate": { + "2": { + "version": 2, + "hash": "520ae8ffc0c057ffa827cb7b267a19fb6b92e3cf3c0a3666ac34e271b6dd0aed", + "action": "remove" + } + }, + "SyncStateItem": { + "1": { + "version": 1, + "hash": "4dbfa0813f5a3f7be0b36249ff2d67e395ad7c9e138c5a122fc7342b8dcc4b92", + "action": "remove" + } + }, + "VeilidConnection": { + "1": { + "version": 1, + "hash": "c1796e7b01c9eae0dbf59cfd5c2c2f0e7eba593e0cea615717246572b27aae4b", + "action": "remove" + } + }, + "CreateCustomWorkerPoolChange": { + "3": { + "version": 3, + "hash": "e982f2ebcdc6fe23a65a014109e33ba7c487bb7ca5623723cf5ec7642f86828c", + "action": "add" + } + }, + "NodePeerUpdate": { + "1": { + "version": 1, + "hash": "9e7cd39f6a9f90e8c595452865525e0989df1688236acfd1a665ed047ba47de9", + "action": "add" + } + }, + "JobInfo": { + "2": { + "version": 2, + "hash": "89dbd4a810586b49498be1f5299b565a19871487e14a120433b0a4cf607b6dee", + "action": "remove" + } + } + } + } +} diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 5e6ee867978..251282ba019 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -1,6 +1,5 @@ # stdlib import importlib -import sys from typing import Any # third party @@ -83,21 +82,48 @@ def set( context: AuthedServiceContext, action_object: ActionObject | TwinObject, add_storage_permission: bool = True, + ignore_detached_objs: bool = False, ) -> Result[ActionObject, str]: return self._set( context, action_object, has_result_read_permission=True, add_storage_permission=add_storage_permission, + ignore_detached_objs=ignore_detached_objs, ) + def is_detached_obj( + self, + action_object: ActionObject | TwinObject, + ignore_detached_obj: bool = False, + ) -> bool: + if ( + isinstance(action_object, TwinObject) + and ( + action_object.mock_obj.syft_blob_storage_entry_id is None + or action_object.private_obj.syft_blob_storage_entry_id is None + ) + and not ignore_detached_obj + ): + return True + if isinstance(action_object, ActionObject) and ( + action_object.syft_blob_storage_entry_id is None and not ignore_detached_obj + ): + return True + return False + def _set( self, context: AuthedServiceContext, action_object: ActionObject | TwinObject, has_result_read_permission: bool = False, add_storage_permission: bool = True, + ignore_detached_objs: bool = False, ) -> Result[ActionObject, str]: + if self.is_detached_obj(action_object, ignore_detached_objs): + return Err( + "you uploaded an ActionObject that is not yet in the blob storage" + ) """Save an object to the action store""" # 🟡 TODO 9: Create some kind of type checking / protocol for SyftSerializable @@ -125,19 +151,13 @@ def _set( ) if result.is_ok(): if isinstance(action_object, TwinObject): - if action_object.mock_obj.syft_blob_storage_entry_id is not None: - print( - action_object.mock_obj.syft_blob_storage_entry_id, - file=sys.stderr, - ) - blob_id = action_object.mock_obj.syft_blob_storage_entry_id - permission = ActionObjectPermission( - blob_id, ActionPermission.ALL_READ - ) - blob_storage_service: AbstractService = context.node.get_service( - BlobStorageService - ) - blob_storage_service.stash.add_permission(permission) + # give read permission to the mock + blob_id = action_object.mock_obj.syft_blob_storage_entry_id + permission = ActionObjectPermission(blob_id, 
From 748db251a7550f4c74d9270c285e6f43955f1bfa Mon Sep 17 00:00:00 2001
From: Koen van der Veen
Date: Fri, 14 Jun 2024 15:36:49 +0200
Subject: [PATCH 145/313] fix bugs

---
 .../7ad602bd16c3479bb6058fa0864ac9f2.json     | 278 ------------------
 .../8b186d7594d0485b90f95f6c95e35a25.json     | 278 ------------------
 .../8e0f05e65fec4fe7bbd364ac930dfa74.json     |  14 -
 .../cc424e329ef6476faf5d77d2851589ed.json     | 278 ------------------
 .../d06611a4a0de458d9257e04c975487e0.json     | 278 ------------------
 .../ed92a248b88e4484b6937cd58d4ad7c1.json     |   0
 .../f1c5b5baa491435fab432d198567ed7a.json     | 278 ------------------
 7 files changed, 1404 deletions(-)
 delete mode 100644 packages/syft/src/syft/protocol/7ad602bd16c3479bb6058fa0864ac9f2.json
 delete mode 100644 packages/syft/src/syft/protocol/8b186d7594d0485b90f95f6c95e35a25.json
 delete mode 100644 packages/syft/src/syft/protocol/8e0f05e65fec4fe7bbd364ac930dfa74.json
 delete mode 100644 packages/syft/src/syft/protocol/cc424e329ef6476faf5d77d2851589ed.json
 delete mode 100644 packages/syft/src/syft/protocol/d06611a4a0de458d9257e04c975487e0.json
 delete mode 100644 packages/syft/src/syft/protocol/ed92a248b88e4484b6937cd58d4ad7c1.json
 delete mode 100644 packages/syft/src/syft/protocol/f1c5b5baa491435fab432d198567ed7a.json

[The deletion diffs in this patch remove the seven stray protocol JSON files
byte-for-byte as they were introduced in PATCH 144; their repeated contents
are omitted.]

From ea7e3eae81c59e570247b6b8075c01a25065f922 Mon Sep 17 00:00:00 2001
From: teo
Date: Fri, 14 Jun 2024 18:01:53 +0300
Subject: [PATCH 146/313] fix notebooks

---
notebooks/api/0.8/04-pytorch-example.ipynb | 50 +- notebooks/api/0.8/05-custom-policy.ipynb | 4 +- notebooks/api/0.8/10-container-images.ipynb | 4 +- .../migration/0-prepare-migration-data.ipynb | 408 +++++++ .../migration/1-connect-and-migrate.ipynb | 1016 +++++++++++++++++ notebooks/experimental/migration/log086 | Bin 0 -> 800 bytes 6 files changed, 1458 insertions(+), 24 deletions(-) create mode 100644 notebooks/experimental/migration/0-prepare-migration-data.ipynb create mode 100644 notebooks/experimental/migration/1-connect-and-migrate.ipynb create mode 100644 notebooks/experimental/migration/log086 diff --git a/notebooks/api/0.8/04-pytorch-example.ipynb b/notebooks/api/0.8/04-pytorch-example.ipynb index 05a18badf4e..5cbc89ecaea 100644 --- a/notebooks/api/0.8/04-pytorch-example.ipynb +++ b/notebooks/api/0.8/04-pytorch-example.ipynb @@ -128,7 +128,7 @@ }, "outputs": [], "source": [ - "train_domain_obj = domain_client.api.services.action.set(train)\n", + "train_domain_obj = train.send(domain_client)\n", "type(train_domain_obj)" ] }, @@ -139,7 +139,7 @@ "metadata": {}, "outputs": [], "source": [ - "assert torch.round(train_domain_obj.syft_action_data.sum()) == 1557" + "train_domain_obj" ] }, { @@ -148,6 +148,16 @@ "id": "11", "metadata": {}, "outputs": [], + "source": [ + "assert torch.round(train_domain_obj.syft_action_data.sum()) == 1557" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12", + "metadata": {}, + "outputs": [], "source": [ "class MLP(nn.Module):\n", " def __init__(self, out_dims):\n", @@ -171,7 +181,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -181,7 +191,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "14", "metadata": { "tags": [] }, @@ -193,7 +203,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "15", "metadata": { "tags": [] }, @@ -205,7 +215,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "16", "metadata": { "tags": [] }, @@ -217,19 +227,19 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "17", "metadata": { "tags": [] }, "outputs": [], "source": [ - "weight_domain_obj = domain_client.api.services.action.set(w)" + "weight_domain_obj = w.send(domain_client)" ] }, { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "18", "metadata": { "tags": [] }, @@ -276,7 +286,7 @@ { "cell_type": "code", "execution_count": null, - "id": "18", + "id": "19", "metadata": { "tags": [] }, @@ -289,7 +299,7 @@ { "cell_type": "code", "execution_count": null, - "id": "19", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -299,7 +309,7 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "21", "metadata": { "tags": [] }, @@ -312,7 +322,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21", + "id": "22", "metadata": { "tags": [] }, @@ -324,7 +334,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22", + "id": "23", "metadata": { "tags": [] }, @@ -337,7 +347,7 @@ { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "24", "metadata": { "tags": [] }, @@ -349,7 +359,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -359,7 +369,7 @@ { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "26", "metadata": { "tags": [] }, @@ -371,7 +381,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "27", 
"metadata": { "tags": [] }, @@ -384,7 +394,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "28", "metadata": {}, "outputs": [], "source": [] @@ -406,7 +416,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.1.-1" }, "toc": { "base_numbering": 1, diff --git a/notebooks/api/0.8/05-custom-policy.ipynb b/notebooks/api/0.8/05-custom-policy.ipynb index fac27bfcbe8..aece45f0192 100644 --- a/notebooks/api/0.8/05-custom-policy.ipynb +++ b/notebooks/api/0.8/05-custom-policy.ipynb @@ -231,7 +231,7 @@ "metadata": {}, "outputs": [], "source": [ - "domain_client.api.services.action.set(x_pointer)" + "x_pointer = x_pointer.send(domain_client)" ] }, { @@ -637,7 +637,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0rc1" + "version": "3.1.-1" }, "toc": { "base_numbering": 1, diff --git a/notebooks/api/0.8/10-container-images.ipynb b/notebooks/api/0.8/10-container-images.ipynb index b0bacf0295f..e080146b63e 100644 --- a/notebooks/api/0.8/10-container-images.ipynb +++ b/notebooks/api/0.8/10-container-images.ipynb @@ -831,7 +831,7 @@ "data = np.array([1, 2, 3])\n", "data_action_obj = sy.ActionObject.from_obj(data)\n", "\n", - "data_pointer = domain_client.api.services.action.set(data_action_obj)\n", + "data_pointer = data_action_obj.send(domain_client)\n", "data_pointer" ] }, @@ -1483,7 +1483,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.11.8" } }, "nbformat": 4, diff --git a/notebooks/experimental/migration/0-prepare-migration-data.ipynb b/notebooks/experimental/migration/0-prepare-migration-data.ipynb new file mode 100644 index 00000000000..0eaba026143 --- /dev/null +++ b/notebooks/experimental/migration/0-prepare-migration-data.ipynb @@ -0,0 +1,408 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "05a33e56-64df-408b-bfa0-a9a73086a916", + "metadata": {}, + "outputs": [], + "source": [ + "# syft absolute\n", + "import syft as sy\n", + "from syft import ActionObject\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "276409fc-02bd-4bfb-a958-dfcf399af76b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'0.8.6'" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sy.__version__" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "30ce91ba", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"k8s\"" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "19faad95-736d-4ad2-b650-da37a6a4ee8a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "node = sy.orchestra.launch(name=\"syft-example-domain-1\", deploy_to=\"k8s\")\n", + "node" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "8dc8067e-c238-4bb6-a5b5-7f53bb5175f7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n" + ] + } + ], + "source": [ + "client = node.login(email=\"info@openmined.org\", password=\"Zp33HpLoC2cHVYsYuyOD6Q7M2v7NKHdt\")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "e9fd34c6", + "metadata": {}, + "outputs": [ + { + "data": { + 
"text/html": [ + "
SyftError: No user exists with info@openmined.org and supplied password.

" + ], + "text/plain": [ + "SyftError: No user exists with info@openmined.org and supplied password." + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "41685ebe-734e-4b72-9c8c-577e7c0fa36c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
+ "SyftSuccess: User 'John Doe' successfully registered! To see users, run `[your_client].users`"
+ ],
+ "text/plain": [
+ "SyftSuccess: User 'John Doe' successfully registered! To see users, run `[your_client].users`"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "client.register(\n",
+ " email=\"ds@openmined.org\", name=\"John Doe\", password=\"pw\", password_verify=\"pw\"\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "86be45d6-b973-472f-9cc2-44aa51b8cdce",
+ "metadata": {},
+ "source": [
+ "# Prepare some data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "bc4eac13-013b-4841-9391-0f026a043746",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Logged into as \n"
+ ]
+ }
+ ],
+ "source": [
+ "client_ds = node.login(email=\"ds@openmined.org\", password=\"pw\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "705c0d71-ec57-4707-bebf-899ad4669010",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 1/1 [00:00<00:00, 11.69it/s]"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Uploading: numpy-data\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "SyftSuccess: Dataset uploaded to 'test_upgradbility'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`"
+ ],
+ "text/plain": [
+ "SyftSuccess: Dataset uploaded to 'test_upgradbility'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`"
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "dataset = sy.Dataset(\n",
+ " name=\"my-dataset\",\n",
+ " description=\"abc\",\n",
+ " asset_list=[\n",
+ " sy.Asset(\n",
+ " name=\"numpy-data\",\n",
+ " mock=np.array([10, 11, 12, 13, 14]),\n",
+ " data=np.array([15, 16, 17, 18, 19]),\n",
+ " mock_is_real=True,\n",
+ " )\n",
+ " ],\n",
+ ")\n",
+ "\n",
+ "client.upload_dataset(dataset)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f0f5d9e2-4fa3-48fb-bf09-67c5821fc439",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "SyftSuccess: Syft function 'compute_mean' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`."
+ ],
+ "text/plain": [
+ "SyftSuccess: Syft function 'compute_mean' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`."
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "data_low = client_ds.datasets[0].assets[0]\n",
+ "\n",
+ "@sy.syft_function_single_use(data=data_low)\n",
+ "def compute_mean(data) -> float:\n",
+ " print(\"Computing mean...\")\n",
+ " return data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e47e1e4e-ae5b-420f-8daf-024dde9b1753",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "req = client_ds.code.request_code_execution(compute_mean)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4348d23b-1fbb-4510-9ddb-2c8bb2a157c7",
+ "metadata": {},
+ "source": [
+ "## TODO: add more data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "273e5d98-054f-4945-8e76-9d5585638866",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Approving request for domain test_upgradbility\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "SyftSuccess: Request 0e2a01b055a247eda6ce5a288e73484c changes applied"
+ ],
+ "text/plain": [
+ "SyftSuccess: Request 0e2a01b055a247eda6ce5a288e73484c changes applied"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "client.requests[0].approve()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8ebd189c-b52a-4fee-bc55-f28bdd87b902",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "job = client_ds.code.compute_mean(data=data_low, blocking=False)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "a9eb0e15-7a93-4c18-bb15-c6939684b565",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "28/05/24 11:37:30 FUNCTION LOG (9598bc5afa504c67aa8c33265bebd4ed): Computing mean...\n"
+ ]
+ }
+ ],
+ "source": [
+ "res = job.wait()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2efa6b31-2739-4d47-908c-5c04c43c5cbd",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([15, 16, 17, 18, 19])"
+ ]
+ },
+ "execution_count": 13,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "res.get()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3ee26222-3ff4-4783-a41d-73eb8368489c",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# job.wait()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "31137c30-07e3-47a4-beea-a3e1f6e0f46d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# res"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6f72f411-74bb-469f-8d61-ffc65e56eb56",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# result = job.wait().get()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b8498780-814f-48b7-9a82-a9a5af4691f1",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "syft_0.8.6",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/experimental/migration/1-connect-and-migrate.ipynb b/notebooks/experimental/migration/1-connect-and-migrate.ipynb
new file mode 100644
index 00000000000..8f806ac2513
--- /dev/null
+++ b/notebooks/experimental/migration/1-connect-and-migrate.ipynb
@@ -0,0 +1,1016 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "8549a375-cac4-4971-a113-d18e45774e62",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# syft absolute\n",
+ "import syft as sy\n",
+ "from syft.serde.recursive import TYPE_BANK\n",
+ "from syft.service.log.log import SyftLog\n",
+ "from syft.types.syft_object_registry import SyftObjectRegistry\n",
+ "\n",
+ "from syft.protocol.data_protocol import get_data_protocol"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "de15efbc-e962-4e67-b9cd-ed33bf7198d2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "dp = get_data_protocol()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "7997dc38-8c3b-4a06-a050-d9453bfc0f3d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# dp.protocol_history.keys()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "c6cef09b-0727-45f4-960e-56c2a85259a5",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream", + "text": [ + "Staging Protocol Changes...\n", + "Object in Document Store that needs migration: [, , , , , ]\n", + "Migrating data for: NodeSettings table.\n", + "Migrating data for: SyftLog table.\n", + "Migrating data for: JobItem table.\n", + "Migrating data for: NodePeer table.\n", + "Migrating data for: SyncState table.\n", + "Migrating data for: ExecutionOutput table.\n", + "could not find ExecutionOutput 1 in ObjectRegistry\n", + "('ExecutionOutput', 1)\n" + ] + }, + { + "ename": "Exception", + "evalue": "Failed to migrate data for ExecutionOutput. Error: cannot unpack non-iterable NoneType object", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mException\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m node \u001b[38;5;241m=\u001b[39m sy\u001b[38;5;241m.\u001b[39morchestra\u001b[38;5;241m.\u001b[39mlaunch(\n\u001b[1;32m 2\u001b[0m name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtest_upgradbility\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 3\u001b[0m dev_mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 4\u001b[0m local_db\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 5\u001b[0m n_consumers\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m2\u001b[39m,\n\u001b[1;32m 6\u001b[0m create_producer\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 7\u001b[0m )\n", + "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/orchestra.py:302\u001b[0m, in \u001b[0;36mOrchestra.launch\u001b[0;34m(name, node_type, deploy_to, node_side_type, port, processes, local_db, dev_mode, reset, tail, host, enable_warnings, n_consumers, thread_workers, create_producer, queue_port, association_request_auto_approval, background_tasks)\u001b[0m\n\u001b[1;32m 297\u001b[0m deployment_type_enum: DeploymentType \u001b[38;5;241m|\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;241m=\u001b[39m get_deployment_type(\n\u001b[1;32m 298\u001b[0m deployment_type\u001b[38;5;241m=\u001b[39mdeploy_to\n\u001b[1;32m 299\u001b[0m )\n\u001b[1;32m 301\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m deployment_type_enum \u001b[38;5;241m==\u001b[39m DeploymentType\u001b[38;5;241m.\u001b[39mPYTHON:\n\u001b[0;32m--> 302\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m deploy_to_python(\n\u001b[1;32m 303\u001b[0m node_type_enum\u001b[38;5;241m=\u001b[39mnode_type_enum,\n\u001b[1;32m 304\u001b[0m deployment_type_enum\u001b[38;5;241m=\u001b[39mdeployment_type_enum,\n\u001b[1;32m 305\u001b[0m port\u001b[38;5;241m=\u001b[39mport,\n\u001b[1;32m 306\u001b[0m name\u001b[38;5;241m=\u001b[39mname,\n\u001b[1;32m 307\u001b[0m host\u001b[38;5;241m=\u001b[39mhost,\n\u001b[1;32m 308\u001b[0m reset\u001b[38;5;241m=\u001b[39mreset,\n\u001b[1;32m 309\u001b[0m tail\u001b[38;5;241m=\u001b[39mtail,\n\u001b[1;32m 310\u001b[0m dev_mode\u001b[38;5;241m=\u001b[39mdev_mode,\n\u001b[1;32m 311\u001b[0m processes\u001b[38;5;241m=\u001b[39mprocesses,\n\u001b[1;32m 312\u001b[0m local_db\u001b[38;5;241m=\u001b[39mlocal_db,\n\u001b[1;32m 313\u001b[0m node_side_type\u001b[38;5;241m=\u001b[39mnode_side_type_enum,\n\u001b[1;32m 314\u001b[0m enable_warnings\u001b[38;5;241m=\u001b[39menable_warnings,\n\u001b[1;32m 315\u001b[0m n_consumers\u001b[38;5;241m=\u001b[39mn_consumers,\n\u001b[1;32m 316\u001b[0m 
thread_workers\u001b[38;5;241m=\u001b[39mthread_workers,\n\u001b[1;32m 317\u001b[0m create_producer\u001b[38;5;241m=\u001b[39mcreate_producer,\n\u001b[1;32m 318\u001b[0m queue_port\u001b[38;5;241m=\u001b[39mqueue_port,\n\u001b[1;32m 319\u001b[0m association_request_auto_approval\u001b[38;5;241m=\u001b[39massociation_request_auto_approval,\n\u001b[1;32m 320\u001b[0m background_tasks\u001b[38;5;241m=\u001b[39mbackground_tasks,\n\u001b[1;32m 321\u001b[0m )\n\u001b[1;32m 322\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m deployment_type_enum \u001b[38;5;241m==\u001b[39m DeploymentType\u001b[38;5;241m.\u001b[39mREMOTE:\n\u001b[1;32m 323\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m deploy_to_remote(\n\u001b[1;32m 324\u001b[0m node_type_enum\u001b[38;5;241m=\u001b[39mnode_type_enum,\n\u001b[1;32m 325\u001b[0m deployment_type_enum\u001b[38;5;241m=\u001b[39mdeployment_type_enum,\n\u001b[1;32m 326\u001b[0m name\u001b[38;5;241m=\u001b[39mname,\n\u001b[1;32m 327\u001b[0m node_side_type\u001b[38;5;241m=\u001b[39mnode_side_type_enum,\n\u001b[1;32m 328\u001b[0m )\n", + "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/orchestra.py:227\u001b[0m, in \u001b[0;36mdeploy_to_python\u001b[0;34m(node_type_enum, deployment_type_enum, port, name, host, reset, tail, dev_mode, processes, local_db, node_side_type, enable_warnings, n_consumers, thread_workers, create_producer, queue_port, association_request_auto_approval, background_tasks)\u001b[0m\n\u001b[1;32m 225\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnode_type\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m sig\u001b[38;5;241m.\u001b[39mparameters\u001b[38;5;241m.\u001b[39mkeys() \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmigrate\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m sig\u001b[38;5;241m.\u001b[39mparameters:\n\u001b[1;32m 226\u001b[0m supported_kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmigrate\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m--> 227\u001b[0m worker \u001b[38;5;241m=\u001b[39m worker_class\u001b[38;5;241m.\u001b[39mnamed(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39msupported_kwargs)\n\u001b[1;32m 228\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 229\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mNotImplementedError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnode_type: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mnode_type_enum\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m is not supported\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", + "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/node/node.py:638\u001b[0m, in \u001b[0;36mNode.named\u001b[0;34m(cls, name, processes, reset, local_db, node_type, node_side_type, enable_warnings, n_consumers, thread_workers, create_producer, queue_port, dev_mode, migrate, in_memory_workers, association_request_auto_approval, background_tasks)\u001b[0m\n\u001b[1;32m 635\u001b[0m node_type \u001b[38;5;241m=\u001b[39m NodeType(node_type)\n\u001b[1;32m 636\u001b[0m node_side_type \u001b[38;5;241m=\u001b[39m NodeSideType(node_side_type)\n\u001b[0;32m--> 638\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mcls\u001b[39m(\n\u001b[1;32m 639\u001b[0m name\u001b[38;5;241m=\u001b[39mname,\n\u001b[1;32m 640\u001b[0m \u001b[38;5;28mid\u001b[39m\u001b[38;5;241m=\u001b[39muid,\n\u001b[1;32m 
641\u001b[0m signing_key\u001b[38;5;241m=\u001b[39mkey,\n\u001b[1;32m 642\u001b[0m processes\u001b[38;5;241m=\u001b[39mprocesses,\n\u001b[1;32m 643\u001b[0m local_db\u001b[38;5;241m=\u001b[39mlocal_db,\n\u001b[1;32m 644\u001b[0m node_type\u001b[38;5;241m=\u001b[39mnode_type,\n\u001b[1;32m 645\u001b[0m node_side_type\u001b[38;5;241m=\u001b[39mnode_side_type,\n\u001b[1;32m 646\u001b[0m enable_warnings\u001b[38;5;241m=\u001b[39menable_warnings,\n\u001b[1;32m 647\u001b[0m blob_storage_config\u001b[38;5;241m=\u001b[39mblob_storage_config,\n\u001b[1;32m 648\u001b[0m queue_port\u001b[38;5;241m=\u001b[39mqueue_port,\n\u001b[1;32m 649\u001b[0m n_consumers\u001b[38;5;241m=\u001b[39mn_consumers,\n\u001b[1;32m 650\u001b[0m thread_workers\u001b[38;5;241m=\u001b[39mthread_workers,\n\u001b[1;32m 651\u001b[0m create_producer\u001b[38;5;241m=\u001b[39mcreate_producer,\n\u001b[1;32m 652\u001b[0m dev_mode\u001b[38;5;241m=\u001b[39mdev_mode,\n\u001b[1;32m 653\u001b[0m migrate\u001b[38;5;241m=\u001b[39mmigrate,\n\u001b[1;32m 654\u001b[0m in_memory_workers\u001b[38;5;241m=\u001b[39min_memory_workers,\n\u001b[1;32m 655\u001b[0m reset\u001b[38;5;241m=\u001b[39mreset,\n\u001b[1;32m 656\u001b[0m association_request_auto_approval\u001b[38;5;241m=\u001b[39massociation_request_auto_approval,\n\u001b[1;32m 657\u001b[0m background_tasks\u001b[38;5;241m=\u001b[39mbackground_tasks,\n\u001b[1;32m 658\u001b[0m )\n", + "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/node/node.py:413\u001b[0m, in \u001b[0;36mNode.__init__\u001b[0;34m(self, name, id, signing_key, action_store_config, document_store_config, root_email, root_username, root_password, processes, is_subprocess, node_type, local_db, reset, blob_storage_config, queue_config, queue_port, n_consumers, create_producer, thread_workers, node_side_type, enable_warnings, dev_mode, migrate, in_memory_workers, smtp_username, smtp_password, email_sender, smtp_port, smtp_host, association_request_auto_approval, background_tasks)\u001b[0m\n\u001b[1;32m 411\u001b[0m \u001b[38;5;66;03m# Migrate data before any operation on db\u001b[39;00m\n\u001b[1;32m 412\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m migrate:\n\u001b[0;32m--> 413\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfind_and_migrate_data()\n\u001b[1;32m 415\u001b[0m \u001b[38;5;66;03m# first migrate, for backwards compatibility\u001b[39;00m\n\u001b[1;32m 416\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39minit_queue_manager(queue_config\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqueue_config)\n", + "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/node/node.py:746\u001b[0m, in \u001b[0;36mNode.find_and_migrate_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 742\u001b[0m migration_status \u001b[38;5;241m=\u001b[39m object_partition\u001b[38;5;241m.\u001b[39mmigrate_data(\n\u001b[1;32m 743\u001b[0m to_klass\u001b[38;5;241m=\u001b[39mobject_type, context\u001b[38;5;241m=\u001b[39mcontext\n\u001b[1;32m 744\u001b[0m )\n\u001b[1;32m 745\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m migration_status\u001b[38;5;241m.\u001b[39mis_err():\n\u001b[0;32m--> 746\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m(\n\u001b[1;32m 747\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFailed to migrate data for \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcanonical_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m. 
Error: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmigration_status\u001b[38;5;241m.\u001b[39merr()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 748\u001b[0m )\n\u001b[1;32m 750\u001b[0m \u001b[38;5;66;03m# Track all object types from action store\u001b[39;00m\n\u001b[1;32m 751\u001b[0m action_object_types \u001b[38;5;241m=\u001b[39m [Action, ActionObject]\n",
+ "\u001b[0;31mException\u001b[0m: Failed to migrate data for ExecutionOutput. Error: cannot unpack non-iterable NoneType object"
+ ]
+ }
+ ],
+ "source": [
+ "node = sy.orchestra.launch(\n",
+ " name=\"test_upgradbility\",\n",
+ " dev_mode=True,\n",
+ " local_db=True,\n",
+ " n_consumers=2,\n",
+ " create_producer=True,\n",
+ ")\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "62e2095b-5415-4e51-bccc-2671b2aecd53",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%debug"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "52595e37-80e9-4432-8336-6eba3428b861",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# %debug"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "157814cf-7a8e-48b4-97e4-0a79d50f6c91",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "markdown",
+ "id": "588875b5-4240-4189-a35f-f570dff109cb",
+ "metadata": {},
+ "source": [
+ "## Appendix"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fbc74a67-3ba5-4fdf-93bb-c419fab0bafc",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "log = SyftLog(job_id=sy.UID())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "af822756-804a-4919-ab81-c74bfcabedd3",
+ "metadata": {},
+ "source": [
+ "eventually there are 3 cases\n",
+ "- it's a SyftObject with canonical_name and version (this works)\n",
+ "- it's not a SyftObject, but it still has a canonical_name and version (currently this is handled by splitting the fqn) (requires refactor)\n",
+ "\n",
+ "- it has an fqn (can be either a SyftObject or a non-SyftObject) from a previous version\n",
+ " - For deserialization of SyftObjects, this will give you the latest object\n",
+ " \n",
+ "\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3910e732-c144-453b-834e-f9a1f3e1638e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\n",
+ "# we want to know, for this fqn, what the object version is. It should always be the one described in 4, because that's\n",
+ "# the only migration we support\n",
+ "# new_version = any([v[\"action\"] == \"add\" for v in dp.protocol_history[\"dev\"][\"object_versions\"][\"SyftLog\"].values()])\n",
+ " # has the mapping of all the protocol versions and class versions that are added or removed."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "49f85396-67c1-468e-8ef9-e79a7b03bbd2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# SyftLog.__canonical_name__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "283d2122-31cf-48a6-8353-58644c930402",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# version_mutations"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "321b6994-3574-4c88-89ba-b4fb4070e9e3",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# bts = sy.serialize(log, to_bytes=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f67a2444-3b34-42f3-9c61-10cbc0cfb9aa",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# with open(\"log086\", \"rb\") as f:\n",
+ "# bts = f.read()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d879fa76-390e-4662-87df-1d9a694ac766",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "SyftLog 3\n",
+ "3\n"
+ ]
+ }
+ ],
+ "source": [
+ "log_des = sy.deserialize(bts, from_bytes=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "89b06471-3196-482e-abc6-41ff486fa70c",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/markdown": [
+ "```python\n",
+ "class SyftLogV3:\n",
+ " id: str = a0adb0f277f54935acf25dc966a1de77\n",
+ " stdout: str = \"ABC\"\n",
+ " stderr: str = \"DEF\"\n",
+ "\n",
+ "```"
+ ],
+ "text/plain": [
+ "syft.service.log.log.SyftLogV3"
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "log_des"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "cf4dab58-1727-4e28-b4c0-c3442ce0f553",
+ "metadata": {},
+ "source": [
+ "if we don't have an old version, how are we going to test?\n",
+ "\n",
+ "- release a beta and test from there? (hard to test during development)\n",
+ "- from a branch? (how does this work with protocol versions?)\n",
+ "- if there is an fqn, we assume that it's 0.8.6?\n",
+ "- "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4fe63393-d834-42cd-b697-65e54a08026d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\n",
+ "TYPE_BANK[\"syft.service.log.log.SyftLog\"]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b31309c4-2819-4a14-983e-ca62f537ebda",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# how do we test, because the old version does not contain any information"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "ef0351a9-1fdb-488d-bb9b-d0e6016121fa",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from syft.types.syft_object import SyftObjectRegistry"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "14aaac2b-8ad9-413f-be3d-d7369763f7e9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "SyftObjectRegistry.__object_version_registry__[\"SyftLog_3\"]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5321f915-7b8c-471f-a5e4-261d764f961f",
+ "metadata": {},
+ "source": [
+ "currently `fqn` -> all serialization properties\n",
+ "\n",
+ "this is problematic because an `fqn` can point to different object versions (or to nothing) in different releases\n",
+ "\n",
+ "instead we want `version_string` to point to all those things\n",
+ "\n",
+ "\n",
+ "we could either\n",
+ "- remove TYPE_BANK\n",
+ "- make `version_string` point to `fqn` and get the serialization properties directly\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "eea8fd60-db50-4ca1-8087-f763b845f90e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def get_obj_registry_str_from_fqn(fqn):\n",
+ " (nonrecursive,\n",
+ " serialize,\n",
+ " deserialize,\n",
+ " attribute_list,\n",
+ " exclude_attrs_list,\n",
+ " serde_overrides,\n",
+ " hash_exclude_attrs,\n",
+ " cls,\n",
+ " attribute_types,\n",
+ " version) = TYPE_BANK[fqn]\n",
+ " object_registry_str = f\"{cls.__canonical_name__}_{cls.__version__}\"\n",
+ "\n",
+ " return object_registry_str\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ 
"id": "43da857d-1054-4302-9656-2a51fadb9d00", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b1154e9f-3002-4e8f-a5f9-d097423d5702", + "metadata": {}, + "outputs": [], + "source": [ + "with open(\"log086\", \"rb\") as f:\n", + " bts = f.read()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "85f57681-b046-470a-a266-d4819c47f295", + "metadata": {}, + "outputs": [ + { + "ename": "ValidationError", + "evalue": "1 validation error for SyftLog\njob_id\n Field required [type=missing, input_value={'id': 1\u001b[0m log \u001b[38;5;241m=\u001b[39m \u001b[43msy\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdeserialize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbts\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfrom_bytes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/serde/deserialize.py:29\u001b[0m, in \u001b[0;36m_deserialize\u001b[0;34m(blob, from_proto, from_bytes)\u001b[0m\n\u001b[1;32m 26\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mWrong deserialization format.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 28\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m from_bytes:\n\u001b[0;32m---> 29\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mrs_bytes2object\u001b[49m\u001b[43m(\u001b[49m\u001b[43mblob\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 31\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m from_proto:\n\u001b[1;32m 32\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m rs_proto2object(blob)\n", + "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/serde/recursive.py:291\u001b[0m, in \u001b[0;36mrs_bytes2object\u001b[0;34m(blob)\u001b[0m\n\u001b[1;32m 286\u001b[0m MAX_TRAVERSAL_LIMIT \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m2\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m64\u001b[39m \u001b[38;5;241m-\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m 288\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m recursive_scheme\u001b[38;5;241m.\u001b[39mfrom_bytes(\n\u001b[1;32m 289\u001b[0m blob, traversal_limit_in_words\u001b[38;5;241m=\u001b[39mMAX_TRAVERSAL_LIMIT\n\u001b[1;32m 290\u001b[0m ) \u001b[38;5;28;01mas\u001b[39;00m msg:\n\u001b[0;32m--> 291\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mrs_proto2object\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmsg\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/serde/recursive.py:382\u001b[0m, in \u001b[0;36mrs_proto2object\u001b[0;34m(proto)\u001b[0m\n\u001b[1;32m 380\u001b[0m \u001b[38;5;28msetattr\u001b[39m(obj, attr_name, attr_value)\n\u001b[1;32m 381\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 382\u001b[0m obj \u001b[38;5;241m=\u001b[39m \u001b[43mclass_type\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 384\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 385\u001b[0m obj \u001b[38;5;241m=\u001b[39m class_type\u001b[38;5;241m.\u001b[39m\u001b[38;5;21m__new__\u001b[39m(class_type) \u001b[38;5;66;03m# type: ignore\u001b[39;00m\n", + "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/types/syft_object.py:590\u001b[0m, in \u001b[0;36mSyftObject.__init__\u001b[0;34m(self, 
**kwargs)\u001b[0m\n\u001b[1;32m 589\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__init__\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 590\u001b[0m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__init__\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 591\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_syft_set_validate_private_attrs_(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 592\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m__post_init__()\n", + "File \u001b[0;32m/opt/anaconda3/envs/syft/lib/python3.12/site-packages/pydantic/main.py:171\u001b[0m, in \u001b[0;36mBaseModel.__init__\u001b[0;34m(self, **data)\u001b[0m\n\u001b[1;32m 169\u001b[0m \u001b[38;5;66;03m# `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks\u001b[39;00m\n\u001b[1;32m 170\u001b[0m __tracebackhide__ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m--> 171\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__pydantic_validator__\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mvalidate_python\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mself_instance\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mValidationError\u001b[0m: 1 validation error for SyftLog\njob_id\n Field required [type=missing, input_value={'id': \u001b[0;32m/opt/anaconda3/envs/syft/lib/python3.12/site-packages/pydantic/main.py\u001b[0m(171)\u001b[0;36m__init__\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 169 \u001b[0;31m \u001b[0;31m# `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 170 \u001b[0;31m \u001b[0m__tracebackhide__\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m--> 171 \u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__pydantic_validator__\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalidate_python\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself_instance\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 172 \u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 173 \u001b[0;31m \u001b[0;31m# The following line sets a flag that we use to determine when `__init__` gets overridden by the user\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ipdb> u\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> \u001b[0;32m/Users/koen/workspace/PySyft/packages/syft/src/syft/types/syft_object.py\u001b[0m(590)\u001b[0;36m__init__\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 588 \u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 589 \u001b[0;31m 
\u001b[0;32mdef\u001b[0m \u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m--> 590 \u001b[0;31m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 591 \u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_syft_set_validate_private_attrs_\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 592 \u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__post_init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ipdb> u\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> \u001b[0;32m/Users/koen/workspace/PySyft/packages/syft/src/syft/serde/recursive.py\u001b[0m(382)\u001b[0;36mrs_proto2object\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 380 \u001b[0;31m \u001b[0msetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 381 \u001b[0;31m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m--> 382 \u001b[0;31m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 383 \u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 384 \u001b[0;31m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ipdb> ll\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[1;32m 294 \u001b[0m\u001b[0;32mdef\u001b[0m \u001b[0mrs_proto2object\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0m_DynamicStructBuilder\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 295 \u001b[0m \u001b[0;31m# relative\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 296 \u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mdeserialize\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0m_deserialize\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 297 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 298 \u001b[0m \u001b[0;31m# clean this mess, Tudor\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 299 \u001b[0m \u001b[0mmodule_parts\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\".\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 300 \u001b[0m \u001b[0mklass\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodule_parts\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpop\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 301 \u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mtype\u001b[0m \u001b[0;34m|\u001b[0m \u001b[0mAny\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 302 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 303 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mklass\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;34m\"NoneType\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 304 \u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 305 \u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mindex_syft_by_module_name\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# type: ignore[assignment,unused-ignore]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 306 \u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# nosec\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 307 \u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 308 \u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodules\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\".\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodule_parts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mklass\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 309 \u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# nosec\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 310 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m\"syft.user\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 311 \u001b[0m \u001b[0;31m# relative\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 312 \u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnode\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnode\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mCODE_RELOADER\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 313 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 314 \u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mload_user_code\u001b[0m \u001b[0;32min\u001b[0m 
\u001b[0mCODE_RELOADER\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 315 \u001b[0m \u001b[0mload_user_code\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 316 \u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 317 \u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodules\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\".\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodule_parts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mklass\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 318 \u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# nosec\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 319 \u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 320 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 321 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mTYPE_BANK\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 322 \u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"\u001b[0m\u001b[0;34m{\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m not in TYPE_BANK\u001b[0m\u001b[0;34m\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 323 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 324 \u001b[0m \u001b[0;31m# TODO: 🐉 sort this out, basically sometimes the syft.user classes are not in the\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 325 \u001b[0m \u001b[0;31m# module name space in sub-processes or threads even though they are loaded on start\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 326 \u001b[0m \u001b[0;31m# its possible that the uvicorn awsgi server is preloading a bunch of threads\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 327 \u001b[0m \u001b[0;31m# however simply getting the class from the TYPE_BANK doesn't always work and\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 328 \u001b[0m \u001b[0;31m# causes some errors so it seems like we want to get the local one where possible\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 329 \u001b[0m (\n", + "\u001b[1;32m 330 \u001b[0m \u001b[0mnonrecursive\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 331 \u001b[0m \u001b[0mserialize\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 332 \u001b[0m \u001b[0mdeserialize\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 333 \u001b[0m 
\u001b[0mattribute_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 334 \u001b[0m \u001b[0mexclude_attrs_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 335 \u001b[0m \u001b[0mserde_overrides\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 336 \u001b[0m \u001b[0mhash_exclude_attrs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 337 \u001b[0m \u001b[0mcls\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 338 \u001b[0m \u001b[0mattribute_types\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 339 \u001b[0m \u001b[0mversion\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 340 \u001b[0m \u001b[0;34m)\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mTYPE_BANK\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 341 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 342 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 343 \u001b[0m \u001b[0;31m# yes this looks stupid but it works and the opposite breaks\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 344 \u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcls\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 345 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 346 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mnonrecursive\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 347 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mdeserialize\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 348 \u001b[0m raise Exception(\n", + "\u001b[1;32m 349 \u001b[0m \u001b[0;34mf\"\u001b[0m\u001b[0;34mCant serialize \u001b[0m\u001b[0;34m{\u001b[0m\u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m nonrecursive without serialize.\u001b[0m\u001b[0;34m\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 350 \u001b[0m \u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 351 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 352 \u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mdeserialize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcombine_bytes\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnonrecursiveBlob\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 353 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 354 \u001b[0m \u001b[0mkwargs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 355 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 
+ "\u001b[1;32m 356 \u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_bytes_list\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfieldsName\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfieldsData\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 357 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mattr_name\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;34m\"\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 358 \u001b[0m \u001b[0mattr_bytes\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcombine_bytes\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_bytes_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 359 \u001b[0m \u001b[0mattr_value\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_deserialize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_bytes\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfrom_bytes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 360 \u001b[0m \u001b[0mtransforms\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mserde_overrides\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 361 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 362 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mtransforms\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 363 \u001b[0m \u001b[0mattr_value\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtransforms\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 364 \u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mattr_name\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mattr_value\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 365 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 366 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"serde_constructor\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 367 \u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mserde_constructor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 368 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 369 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0missubclass\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mEnum\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0;34m\"value\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 370 \u001b[0m 
\u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__new__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"value\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 371 \u001b[0m \u001b[0;32melif\u001b[0m \u001b[0missubclass\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mBaseModel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 372 \u001b[0m \u001b[0;31m# if we skip the __new__ flow of BaseModel we get the error\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 373 \u001b[0m \u001b[0;31m# AttributeError: object has no attribute '__fields_set__'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 374 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 375 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m\"syft.user\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 376 \u001b[0m \u001b[0;31m# weird issues with pydantic and ForwardRef on user classes being inited\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 377 \u001b[0m \u001b[0;31m# with custom state args / kwargs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 378 \u001b[0m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 379 \u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 380 \u001b[0m \u001b[0msetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 381 \u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m--> 382 \u001b[0;31m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[1;32m 383 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 384 \u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 385 \u001b[0m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__new__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# type: ignore\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[1;32m 386 \u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m \u001b[0;32min\u001b[0m 
\u001b[0mkwargs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+ "\u001b[1;32m 387 \u001b[0m \u001b[0msetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+ "\u001b[1;32m 388 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+ "\u001b[1;32m 389 \u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mobj\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+ "\u001b[1;32m 390 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+ "\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "ipdb> klass\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "'SyftLog'\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "ipdb> proto.fullyQualifiedName\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "'syft.service.log.log.SyftLog'\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "ipdb> class_type\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "ipdb> index_syft_by_module_name(proto.fullyQualifiedName)\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "ipdb> q\n"
+ ]
+ }
+ ],
+ "source": [
+ "%debug"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9af1f589-6df1-4b5e-9318-13d3c6273863",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import syft"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6343e392-0b74-420c-b80a-9a940dd1e169",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sys"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f17e274f-acd5-419f-8f78-3ae8a10ed0c4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from syft.serde.recursive import TYPE_BANK"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "53874015-5133-41eb-a158-afef9f64dc50",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# TYPE_BANK[\"syft.service.log.log.SyftLog\"]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "a6731ef4-a334-4d9c-9d44-7c3dd48ce442",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from typing import Type"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "606f90c5-5b53-4b76-b4dc-5ef6e7635dce",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "class SyftClassRegistry:\n",
+ "    # maps (canonical_name, version) -> class, independently of the fqn\n",
+ "    __class_registry__: dict[tuple, Type] = {}\n",
+ "    is_built = False\n",
+ "\n",
+ "    @classmethod\n",
+ "    def build(cls):\n",
+ "        for path, v in TYPE_BANK.items():\n",
+ "            (nonrecursive,\n",
+ "             serialize,\n",
+ "             deserialize,\n",
+ "             attribute_list,\n",
+ "             exclude_attrs_list,\n",
+ "             serde_overrides,\n",
+ "             hash_exclude_attrs,\n",
+ "             klass,\n",
+ "             attribute_types,\n",
+ "             version) = v\n",
+ "            cls.__class_registry__[klass.__canonical_name__, klass.__version__] = klass\n",
+ "        cls.is_built = True\n",
+ "\n",
+ "    @classmethod\n",
+ "    def get(cls, canonical_name: str, version: int):\n",
+ "        if not cls.is_built:\n",
+ "            cls.build()\n",
+ "        return cls.__class_registry__[canonical_name, version]\n"
+ ]
+ },
+ {
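+ "cell_type": "markdown",
+ "id": "registry-usage-note",
+ "metadata": {},
+ "source": [
+ "A minimal sanity check for the registry above (an illustrative cell added while editing; it assumes `SyftLog` is registered in `TYPE_BANK` with `__version__ == 3`, as the debugger output above suggests): build once from `TYPE_BANK`, then resolve the class by its `(canonical_name, version)` pair instead of by fqn.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "registry-usage-code",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# sketch: resolve a class by (canonical_name, version) rather than by fqn\n",
+ "log_cls = SyftClassRegistry.get(\"SyftLog\", 3)\n",
+ "assert log_cls.__canonical_name__ == \"SyftLog\"\n",
+ "log_cls"
+ ]
+ },
+ {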
"cell_type": "code", + "execution_count": null, + "id": "8f08f3aa-d688-4820-93de-f0605d061c39", + "metadata": {}, + "outputs": [], + "source": [ + "(nonrecursive,\n", + "serialize,\n", + "deserialize,\n", + "attribute_list,\n", + "exclude_attrs_list,\n", + "serde_overrides,\n", + "hash_exclude_attrs,\n", + "cls,\n", + "attribute_types,\n", + "version) = TYPE_BANK[\"syft.service.settings.settings.NodeSettingsV2\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0f4bb863-208e-4d85-bfa4-3fd5c4ce9190", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'SyftLog'" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cls.__canonical_name__" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3e7a686c-f847-422d-a4e9-191914e20395", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "syft.service.log.log.SyftLog" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cls" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bcf6ea01-e9d7-4c45-9197-95d663bc0320", + "metadata": {}, + "outputs": [], + "source": [ + "(nonrecursive,\n", + "serialize,\n", + "deserialize,\n", + "attribute_list,\n", + "exclude_attrs_list,\n", + "serde_overrides,\n", + "hash_exclude_attrs,\n", + "cls,\n", + "attribute_types,\n", + "version) = TYPE_BANK[\"syft.service.log.log.SyftLog\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4423ae98-12d1-44e9-8822-cf446a861ea4", + "metadata": {}, + "outputs": [], + "source": [ + "from syft.protocol.data_protocol import get_data_protocol\n", + "\n", + "dp = get_data_protocol()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "68581aa2-288a-4df0-8280-3a6645966583", + "metadata": {}, + "outputs": [], + "source": [ + "# dp.current" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13372b49-2f1f-4eef-b677-0d3dc0d6aa65", + "metadata": {}, + "outputs": [], + "source": [ + "# dp.protocol_history" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3d99784e-91e7-48c7-b0bc-0e5457932513", + "metadata": {}, + "outputs": [], + "source": [ + "version = \"3\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c0fa6720-a5bf-41bf-b9cc-3913529ff269", + "metadata": {}, + "outputs": [ + { + "ename": "AttributeError", + "evalue": "module 'syft.service.log.log' has no attribute 'SyftLogV1'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[21], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43msys\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodules\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43msyft.service.log.log\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mSyftLogV1\u001b[49m\n", + "\u001b[0;31mAttributeError\u001b[0m: module 'syft.service.log.log' has no attribute 'SyftLogV1'" + ] + } + ], + "source": [ + "sys.modules[\"syft.service.log.log\"].SyftLogV1" + ] + }, + { + "cell_type": "markdown", + "id": "6d26438a-32a0-4e4c-be6e-fd063905db44", + "metadata": {}, + "source": [ + "- problem 1: it indexes classes by their location in the previous version, which may change in the 
current version\n",
+ "- problem 2: resolving the name of the class to the version is dynamic and has a different meaning across versions, therefore having just the class name is not enough\n",
+ "\n",
+ "Solution: create a `SyftClassRegistry` that follows the same patterns as the `stage_` logic\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c4c32464-1053-43f2-819a-d7c11d0f83c5",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "syft.service.log.log"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2f4b6e5f-1542-44c6-bd2c-de0871b00de1",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "syft.__dict__[\"service\"]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "dc2b7e8d-0e8a-4c9b-9259-ad3857c33901",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.8"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/experimental/migration/log086 b/notebooks/experimental/migration/log086
new file mode 100644
index 0000000000000000000000000000000000000000..a8050298c2f4a61b818bd259767e9b1e97867e93
GIT binary patch
literal 800
zcmcIhu}TCn5RJzwC@3n3+<~Qqg#oR^PQ5!~T|BHZofmgusPO?F`|ue;XLUhoI5
z5d`rsEN%S-8{cfQf@mY?z?(P8%)AL1#uy6$X8~J5z=3toli)X?Hd78PH!>?!D0!me
z3dguS)G^hp0xe(!Yy)G@uiyuuIl;Q&q^89l-~z
z2~Tx3(|bw3w(7mMypo3JD&nX6ZI_|CO@NsCi(ZmTFwJ>xz`;5E@xgib-q=WHna}$r!Mi|b3XtOU16*M
literal 0
HcmV?d00001

From 38d28cc32ea6a47109a989e2bd6de017c136b4a0 Mon Sep 17 00:00:00 2001
From: teo
Date: Fri, 14 Jun 2024 18:08:25 +0300
Subject: [PATCH 147/313] remove notebooks from migration

---
 .../migration/0-prepare-migration-data.ipynb |  408 -------
 .../migration/1-connect-and-migrate.ipynb    | 1016 -----------------
 notebooks/experimental/migration/log086      |  Bin 800 -> 0 bytes
 3 files changed, 1424 deletions(-)
 delete mode 100644 notebooks/experimental/migration/0-prepare-migration-data.ipynb
 delete mode 100644 notebooks/experimental/migration/1-connect-and-migrate.ipynb
 delete mode 100644 notebooks/experimental/migration/log086

diff --git a/notebooks/experimental/migration/0-prepare-migration-data.ipynb b/notebooks/experimental/migration/0-prepare-migration-data.ipynb
deleted file mode 100644
index 0eaba026143..00000000000
--- a/notebooks/experimental/migration/0-prepare-migration-data.ipynb
+++ /dev/null
@@ -1,408 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "id": "05a33e56-64df-408b-bfa0-a9a73086a916",
- "metadata": {},
- "outputs": [],
- "source": [
- "# syft absolute\n",
- "import syft as sy\n",
- "from syft import ActionObject\n",
- "import numpy as np"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "id": "276409fc-02bd-4bfb-a958-dfcf399af76b",
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "'0.8.6'"
- ]
- },
- "execution_count": 2,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- 
"sy.__version__" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "30ce91ba", - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "os.environ[\"ORCHESTRA_DEPLOYMENT_TYPE\"] = \"k8s\"" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "19faad95-736d-4ad2-b650-da37a6a4ee8a", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "node = sy.orchestra.launch(name=\"syft-example-domain-1\", deploy_to=\"k8s\")\n", - "node" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "8dc8067e-c238-4bb6-a5b5-7f53bb5175f7", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as GUEST\n" - ] - } - ], - "source": [ - "client = node.login(email=\"info@openmined.org\", password=\"Zp33HpLoC2cHVYsYuyOD6Q7M2v7NKHdt\")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "e9fd34c6", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftError: No user exists with info@openmined.org and supplied password.

" - ], - "text/plain": [ - "SyftError: No user exists with info@openmined.org and supplied password." - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "41685ebe-734e-4b72-9c8c-577e7c0fa36c", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User 'John Doe' successfully registered! To see users, run `[your_client].users`

" - ], - "text/plain": [ - "SyftSuccess: User 'John Doe' successfully registered! To see users, run `[your_client].users`" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.register(\n", - " email=\"ds@openmined.org\", name=\"John Doe\", password=\"pw\", password_verify=\"pw\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "86be45d6-b973-472f-9cc2-44aa51b8cdce", - "metadata": {}, - "source": [ - "# Prepare some data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bc4eac13-013b-4841-9391-0f026a043746", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - } - ], - "source": [ - "client_ds = node.login(email=\"ds@openmined.org\", password=\"pw\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "705c0d71-ec57-4707-bebf-899ad4669010", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "100%|██████████| 1/1 [00:00<00:00, 11.69it/s]" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Uploading: numpy-data\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Dataset uploaded to 'test_upgradbility'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`

" - ], - "text/plain": [ - "SyftSuccess: Dataset uploaded to 'test_upgradbility'. To see the datasets uploaded by a client on this node, use command `[your_client].datasets`" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dataset = sy.Dataset(\n", - " name=\"my-dataset\",\n", - " description=\"abc\",\n", - " asset_list=[\n", - " sy.Asset(\n", - " name=\"numpy-data\",\n", - " mock=np.array([10, 11, 12, 13, 14]),\n", - " data=np.array([15, 16, 17, 18, 19]),\n", - " mock_is_real=True,\n", - " )\n", - " ],\n", - ")\n", - "\n", - "client.upload_dataset(dataset)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f0f5d9e2-4fa3-48fb-bf09-67c5821fc439", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'compute_mean' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'compute_mean' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "data_low = client_ds.datasets[0].assets[0]\n", - "\n", - "@sy.syft_function_single_use(data=data_low)\n", - "def compute_mean(data) -> float:\n", - " print(\"Computing mean...\")\n", - " return data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e47e1e4e-ae5b-420f-8daf-024dde9b1753", - "metadata": {}, - "outputs": [], - "source": [ - "req = client_ds.code.request_code_execution(compute_mean)" - ] - }, - { - "cell_type": "markdown", - "id": "4348d23b-1fbb-4510-9ddb-2c8bb2a157c7", - "metadata": {}, - "source": [ - "## TODO: add more data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "273e5d98-054f-4945-8e76-9d5585638866", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Approving request for domain test_upgradbility\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request 0e2a01b055a247eda6ce5a288e73484c changes applied

" - ], - "text/plain": [ - "SyftSuccess: Request 0e2a01b055a247eda6ce5a288e73484c changes applied" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client.requests[0].approve()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8ebd189c-b52a-4fee-bc55-f28bdd87b902", - "metadata": {}, - "outputs": [], - "source": [ - "job = client_ds.code.compute_mean(data=data_low, blocking=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a9eb0e15-7a93-4c18-bb15-c6939684b565", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "28/05/24 11:37:30 FUNCTION LOG (9598bc5afa504c67aa8c33265bebd4ed): Computing mean...\n" - ] - } - ], - "source": [ - "res = job.wait()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2efa6b31-2739-4d47-908c-5c04c43c5cbd", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([15, 16, 17, 18, 19])" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res.get()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3ee26222-3ff4-4783-a41d-73eb8368489c", - "metadata": {}, - "outputs": [], - "source": [ - "# job.wait()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31137c30-07e3-47a4-beea-a3e1f6e0f46d", - "metadata": {}, - "outputs": [], - "source": [ - "# res" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6f72f411-74bb-469f-8d61-ffc65e56eb56", - "metadata": {}, - "outputs": [], - "source": [ - "# result = job.wait().get()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b8498780-814f-48b7-9a82-a9a5af4691f1", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "syft_0.8.6", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.3" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/experimental/migration/1-connect-and-migrate.ipynb b/notebooks/experimental/migration/1-connect-and-migrate.ipynb deleted file mode 100644 index 8f806ac2513..00000000000 --- a/notebooks/experimental/migration/1-connect-and-migrate.ipynb +++ /dev/null @@ -1,1016 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "8549a375-cac4-4971-a113-d18e45774e62", - "metadata": {}, - "outputs": [], - "source": [ - "# syft absolute\n", - "import syft as sy\n", - "from syft.serde.recursive import TYPE_BANK\n", - "from syft.service.log.log import SyftLog\n", - "from syft.types.syft_object_registry import SyftObjectRegistry\n", - "\n", - "from syft.protocol.data_protocol import get_data_protocol" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "de15efbc-e962-4e67-b9cd-ed33bf7198d2", - "metadata": {}, - "outputs": [], - "source": [ - "dp = get_data_protocol()" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "7997dc38-8c3b-4a06-a050-d9453bfc0f3d", - "metadata": {}, - "outputs": [], - "source": [ - "# dp.protocol_history.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "c6cef09b-0727-45f4-960e-56c2a85259a5", - "metadata": {}, - "outputs": [ - { - "name": 
"stdout", - "output_type": "stream", - "text": [ - "Staging Protocol Changes...\n", - "Object in Document Store that needs migration: [, , , , , ]\n", - "Migrating data for: NodeSettings table.\n", - "Migrating data for: SyftLog table.\n", - "Migrating data for: JobItem table.\n", - "Migrating data for: NodePeer table.\n", - "Migrating data for: SyncState table.\n", - "Migrating data for: ExecutionOutput table.\n", - "could not find ExecutionOutput 1 in ObjectRegistry\n", - "('ExecutionOutput', 1)\n" - ] - }, - { - "ename": "Exception", - "evalue": "Failed to migrate data for ExecutionOutput. Error: cannot unpack non-iterable NoneType object", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mException\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m node \u001b[38;5;241m=\u001b[39m sy\u001b[38;5;241m.\u001b[39morchestra\u001b[38;5;241m.\u001b[39mlaunch(\n\u001b[1;32m 2\u001b[0m name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtest_upgradbility\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 3\u001b[0m dev_mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 4\u001b[0m local_db\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 5\u001b[0m n_consumers\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m2\u001b[39m,\n\u001b[1;32m 6\u001b[0m create_producer\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 7\u001b[0m )\n", - "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/orchestra.py:302\u001b[0m, in \u001b[0;36mOrchestra.launch\u001b[0;34m(name, node_type, deploy_to, node_side_type, port, processes, local_db, dev_mode, reset, tail, host, enable_warnings, n_consumers, thread_workers, create_producer, queue_port, association_request_auto_approval, background_tasks)\u001b[0m\n\u001b[1;32m 297\u001b[0m deployment_type_enum: DeploymentType \u001b[38;5;241m|\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;241m=\u001b[39m get_deployment_type(\n\u001b[1;32m 298\u001b[0m deployment_type\u001b[38;5;241m=\u001b[39mdeploy_to\n\u001b[1;32m 299\u001b[0m )\n\u001b[1;32m 301\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m deployment_type_enum \u001b[38;5;241m==\u001b[39m DeploymentType\u001b[38;5;241m.\u001b[39mPYTHON:\n\u001b[0;32m--> 302\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m deploy_to_python(\n\u001b[1;32m 303\u001b[0m node_type_enum\u001b[38;5;241m=\u001b[39mnode_type_enum,\n\u001b[1;32m 304\u001b[0m deployment_type_enum\u001b[38;5;241m=\u001b[39mdeployment_type_enum,\n\u001b[1;32m 305\u001b[0m port\u001b[38;5;241m=\u001b[39mport,\n\u001b[1;32m 306\u001b[0m name\u001b[38;5;241m=\u001b[39mname,\n\u001b[1;32m 307\u001b[0m host\u001b[38;5;241m=\u001b[39mhost,\n\u001b[1;32m 308\u001b[0m reset\u001b[38;5;241m=\u001b[39mreset,\n\u001b[1;32m 309\u001b[0m tail\u001b[38;5;241m=\u001b[39mtail,\n\u001b[1;32m 310\u001b[0m dev_mode\u001b[38;5;241m=\u001b[39mdev_mode,\n\u001b[1;32m 311\u001b[0m processes\u001b[38;5;241m=\u001b[39mprocesses,\n\u001b[1;32m 312\u001b[0m local_db\u001b[38;5;241m=\u001b[39mlocal_db,\n\u001b[1;32m 313\u001b[0m node_side_type\u001b[38;5;241m=\u001b[39mnode_side_type_enum,\n\u001b[1;32m 314\u001b[0m enable_warnings\u001b[38;5;241m=\u001b[39menable_warnings,\n\u001b[1;32m 315\u001b[0m n_consumers\u001b[38;5;241m=\u001b[39mn_consumers,\n\u001b[1;32m 316\u001b[0m 
thread_workers\u001b[38;5;241m=\u001b[39mthread_workers,\n\u001b[1;32m 317\u001b[0m create_producer\u001b[38;5;241m=\u001b[39mcreate_producer,\n\u001b[1;32m 318\u001b[0m queue_port\u001b[38;5;241m=\u001b[39mqueue_port,\n\u001b[1;32m 319\u001b[0m association_request_auto_approval\u001b[38;5;241m=\u001b[39massociation_request_auto_approval,\n\u001b[1;32m 320\u001b[0m background_tasks\u001b[38;5;241m=\u001b[39mbackground_tasks,\n\u001b[1;32m 321\u001b[0m )\n\u001b[1;32m 322\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m deployment_type_enum \u001b[38;5;241m==\u001b[39m DeploymentType\u001b[38;5;241m.\u001b[39mREMOTE:\n\u001b[1;32m 323\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m deploy_to_remote(\n\u001b[1;32m 324\u001b[0m node_type_enum\u001b[38;5;241m=\u001b[39mnode_type_enum,\n\u001b[1;32m 325\u001b[0m deployment_type_enum\u001b[38;5;241m=\u001b[39mdeployment_type_enum,\n\u001b[1;32m 326\u001b[0m name\u001b[38;5;241m=\u001b[39mname,\n\u001b[1;32m 327\u001b[0m node_side_type\u001b[38;5;241m=\u001b[39mnode_side_type_enum,\n\u001b[1;32m 328\u001b[0m )\n", - "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/orchestra.py:227\u001b[0m, in \u001b[0;36mdeploy_to_python\u001b[0;34m(node_type_enum, deployment_type_enum, port, name, host, reset, tail, dev_mode, processes, local_db, node_side_type, enable_warnings, n_consumers, thread_workers, create_producer, queue_port, association_request_auto_approval, background_tasks)\u001b[0m\n\u001b[1;32m 225\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnode_type\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m sig\u001b[38;5;241m.\u001b[39mparameters\u001b[38;5;241m.\u001b[39mkeys() \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmigrate\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m sig\u001b[38;5;241m.\u001b[39mparameters:\n\u001b[1;32m 226\u001b[0m supported_kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmigrate\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m--> 227\u001b[0m worker \u001b[38;5;241m=\u001b[39m worker_class\u001b[38;5;241m.\u001b[39mnamed(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39msupported_kwargs)\n\u001b[1;32m 228\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 229\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mNotImplementedError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnode_type: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mnode_type_enum\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m is not supported\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", - "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/node/node.py:638\u001b[0m, in \u001b[0;36mNode.named\u001b[0;34m(cls, name, processes, reset, local_db, node_type, node_side_type, enable_warnings, n_consumers, thread_workers, create_producer, queue_port, dev_mode, migrate, in_memory_workers, association_request_auto_approval, background_tasks)\u001b[0m\n\u001b[1;32m 635\u001b[0m node_type \u001b[38;5;241m=\u001b[39m NodeType(node_type)\n\u001b[1;32m 636\u001b[0m node_side_type \u001b[38;5;241m=\u001b[39m NodeSideType(node_side_type)\n\u001b[0;32m--> 638\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mcls\u001b[39m(\n\u001b[1;32m 639\u001b[0m name\u001b[38;5;241m=\u001b[39mname,\n\u001b[1;32m 640\u001b[0m \u001b[38;5;28mid\u001b[39m\u001b[38;5;241m=\u001b[39muid,\n\u001b[1;32m 
641\u001b[0m signing_key\u001b[38;5;241m=\u001b[39mkey,\n\u001b[1;32m 642\u001b[0m processes\u001b[38;5;241m=\u001b[39mprocesses,\n\u001b[1;32m 643\u001b[0m local_db\u001b[38;5;241m=\u001b[39mlocal_db,\n\u001b[1;32m 644\u001b[0m node_type\u001b[38;5;241m=\u001b[39mnode_type,\n\u001b[1;32m 645\u001b[0m node_side_type\u001b[38;5;241m=\u001b[39mnode_side_type,\n\u001b[1;32m 646\u001b[0m enable_warnings\u001b[38;5;241m=\u001b[39menable_warnings,\n\u001b[1;32m 647\u001b[0m blob_storage_config\u001b[38;5;241m=\u001b[39mblob_storage_config,\n\u001b[1;32m 648\u001b[0m queue_port\u001b[38;5;241m=\u001b[39mqueue_port,\n\u001b[1;32m 649\u001b[0m n_consumers\u001b[38;5;241m=\u001b[39mn_consumers,\n\u001b[1;32m 650\u001b[0m thread_workers\u001b[38;5;241m=\u001b[39mthread_workers,\n\u001b[1;32m 651\u001b[0m create_producer\u001b[38;5;241m=\u001b[39mcreate_producer,\n\u001b[1;32m 652\u001b[0m dev_mode\u001b[38;5;241m=\u001b[39mdev_mode,\n\u001b[1;32m 653\u001b[0m migrate\u001b[38;5;241m=\u001b[39mmigrate,\n\u001b[1;32m 654\u001b[0m in_memory_workers\u001b[38;5;241m=\u001b[39min_memory_workers,\n\u001b[1;32m 655\u001b[0m reset\u001b[38;5;241m=\u001b[39mreset,\n\u001b[1;32m 656\u001b[0m association_request_auto_approval\u001b[38;5;241m=\u001b[39massociation_request_auto_approval,\n\u001b[1;32m 657\u001b[0m background_tasks\u001b[38;5;241m=\u001b[39mbackground_tasks,\n\u001b[1;32m 658\u001b[0m )\n", - "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/node/node.py:413\u001b[0m, in \u001b[0;36mNode.__init__\u001b[0;34m(self, name, id, signing_key, action_store_config, document_store_config, root_email, root_username, root_password, processes, is_subprocess, node_type, local_db, reset, blob_storage_config, queue_config, queue_port, n_consumers, create_producer, thread_workers, node_side_type, enable_warnings, dev_mode, migrate, in_memory_workers, smtp_username, smtp_password, email_sender, smtp_port, smtp_host, association_request_auto_approval, background_tasks)\u001b[0m\n\u001b[1;32m 411\u001b[0m \u001b[38;5;66;03m# Migrate data before any operation on db\u001b[39;00m\n\u001b[1;32m 412\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m migrate:\n\u001b[0;32m--> 413\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfind_and_migrate_data()\n\u001b[1;32m 415\u001b[0m \u001b[38;5;66;03m# first migrate, for backwards compatibility\u001b[39;00m\n\u001b[1;32m 416\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39minit_queue_manager(queue_config\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqueue_config)\n", - "File \u001b[0;32m~/OpenMined/PySyft/packages/syft/src/syft/node/node.py:746\u001b[0m, in \u001b[0;36mNode.find_and_migrate_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 742\u001b[0m migration_status \u001b[38;5;241m=\u001b[39m object_partition\u001b[38;5;241m.\u001b[39mmigrate_data(\n\u001b[1;32m 743\u001b[0m to_klass\u001b[38;5;241m=\u001b[39mobject_type, context\u001b[38;5;241m=\u001b[39mcontext\n\u001b[1;32m 744\u001b[0m )\n\u001b[1;32m 745\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m migration_status\u001b[38;5;241m.\u001b[39mis_err():\n\u001b[0;32m--> 746\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m(\n\u001b[1;32m 747\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFailed to migrate data for \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcanonical_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m. 
Error: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmigration_status\u001b[38;5;241m.\u001b[39merr()\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 748\u001b[0m )\n\u001b[1;32m 750\u001b[0m \u001b[38;5;66;03m# Track all object types from action store\u001b[39;00m\n\u001b[1;32m 751\u001b[0m action_object_types \u001b[38;5;241m=\u001b[39m [Action, ActionObject]\n",
- "\u001b[0;31mException\u001b[0m: Failed to migrate data for ExecutionOutput. Error: cannot unpack non-iterable NoneType object"
- ]
- }
- ],
- "source": [
- "node = sy.orchestra.launch(\n",
- "    name=\"test_upgradbility\",\n",
- "    dev_mode=True,\n",
- "    local_db=True,\n",
- "    n_consumers=2,\n",
- "    create_producer=True,\n",
- ")\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "62e2095b-5415-4e51-bccc-2671b2aecd53",
- "metadata": {},
- "outputs": [],
- "source": [
- "%debug"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "52595e37-80e9-4432-8336-6eba3428b861",
- "metadata": {},
- "outputs": [],
- "source": [
- "# %debug"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "157814cf-7a8e-48b4-97e4-0a79d50f6c91",
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "markdown",
- "id": "588875b5-4240-4189-a35f-f570dff109cb",
- "metadata": {},
- "source": [
- "## Appendix"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "fbc74a67-3ba5-4fdf-93bb-c419fab0bafc",
- "metadata": {},
- "outputs": [],
- "source": [
- "log = SyftLog(job_id=sy.UID())"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "af822756-804a-4919-ab81-c74bfcabedd3",
- "metadata": {},
- "source": [
- "eventually there are 3 cases\n",
- "- it's a SyftObject with canonical_name and version (this works)\n",
- "- it's not a SyftObject, but it still has a canonical_name and version (currently this is handled by splitting the fqn) (requires refactor)\n",
- "\n",
- "- it has a fqn (can either be SyftObject or non SyftObject) from a previous version\n",
- "  - For deserialization of SyftObjects, this will give you the latest object\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "3910e732-c144-453b-834e-f9a1f3e1638e",
- "metadata": {},
- "outputs": [],
- "source": [
- "\n",
- "# we want to know, for this fqn, what is the object version. It should always be the one described in 4, because that's\n",
- "# the only migration we support\n",
- "# new_version = any([v[\"action\"] == \"add\" for v in dp.protocol_history[\"dev\"][\"object_versions\"][\"SyftLog\"].values()])\n",
- "# has the mapping of all the protocol versions and class versions that are added or removed."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "49f85396-67c1-468e-8ef9-e79a7b03bbd2",
- "metadata": {},
- "outputs": [],
- "source": [
- "# SyftLog.__canonical_name__"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "283d2122-31cf-48a6-8353-58644c930402",
- "metadata": {},
- "outputs": [],
- "source": [
- "# version_mutations"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "321b6994-3574-4c88-89ba-b4fb4070e9e3",
- "metadata": {},
- "outputs": [],
- "source": [
- "# bts = sy.serialize(log, to_bytes=True)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f67a2444-3b34-42f3-9c61-10cbc0cfb9aa",
- "metadata": {},
- "outputs": [],
- "source": [
- "# with open(\"log086\", \"rb\") as f:\n",
- "#     bts = f.read()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "d879fa76-390e-4662-87df-1d9a694ac766",
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "SyftLog 3\n",
- "3\n"
- ]
- }
- ],
- "source": [
- "log_des = sy.deserialize(bts, from_bytes=True)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "89b06471-3196-482e-abc6-41ff486fa70c",
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/markdown": [
- "```python\n",
- "class SyftLogV3:\n",
- "  id: str = a0adb0f277f54935acf25dc966a1de77\n",
- "  stdout: str = \"ABC\"\n",
- "  stderr: str = \"DEF\"\n",
- "\n",
- "```"
- ],
- "text/plain": [
- "syft.service.log.log.SyftLogV3"
- ]
- },
- "execution_count": 10,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "log_des"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "cf4dab58-1727-4e28-b4c0-c3442ce0f553",
- "metadata": {},
- "source": [
- "if we don't have an old version, how are we going to test?\n",
- "\n",
- "- release a beta and test from there? (hard to test during development)\n",
- "- from a branch? (how does this work with protocol versions?)\n",
- "- if there is a fqn, we assume that it's 0.8.6?\n",
- "- "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "4fe63393-d834-42cd-b697-65e54a08026d",
- "metadata": {},
- "outputs": [],
- "source": [
- "\n",
- "TYPE_BANK[\"syft.service.log.log.SyftLog\"]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b31309c4-2819-4a14-983e-ca62f537ebda",
- "metadata": {},
- "outputs": [],
- "source": [
- "# how do we test, because the old version does not contain any information"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "ef0351a9-1fdb-488d-bb9b-d0e6016121fa",
- "metadata": {},
- "outputs": [],
- "source": [
- "from syft.types.syft_object import SyftObjectRegistry"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "14aaac2b-8ad9-413f-be3d-d7369763f7e9",
- "metadata": {},
- "outputs": [],
- "source": [
- "SyftObjectRegistry.__object_version_registry__[\"SyftLog_3\"]"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "5321f915-7b8c-471f-a5e4-261d764f961f",
- "metadata": {},
- "source": [
- "currently `fqn` -> all serialization properties\n",
- "\n",
- "this is problematic because `fqn` can point to different object versions (or to nothing) in different versions\n",
- "\n",
- "instead we want `version_string` to point to all those things\n",
- "\n",
- "\n",
- "we could either\n",
- "- remove TYPE_BANK\n",
- "- make `version_string` point to `fqn` and get the serialization properties directly\n",
- "\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "eea8fd60-db50-4ca1-8087-f763b845f90e",
- "metadata": {},
- "outputs": [],
- "source": [
- "def get_obj_registry_str_from_fqn(fqn):\n",
- "    (nonrecursive,\n",
- "     serialize,\n",
- "     deserialize,\n",
- "     attribute_list,\n",
- "     exclude_attrs_list,\n",
- "     serde_overrides,\n",
- "     hash_exclude_attrs,\n",
- "     cls,\n",
- "     attribute_types,\n",
- "     version) = TYPE_BANK[fqn]\n",
- "    object_registry_str = f\"{cls.__canonical_name__}_{cls.__version__}\"\n",
- "\n",
- "    return object_registry_str\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "7acfac72-7814-4237-9819-b9d9175c995d",
- "metadata": {},
- "outputs": [],
- "source": [
- "name = get_obj_registry_str_from_fqn(\"syft.service.log.log.SyftLog\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "89a91634-27e7-4537-a32f-c56d1f4c5c9c",
- "metadata": {},
- "outputs": [],
- "source": [
- "SyftObjectRegistry.__object_version_registry__[name]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "5625828b-773b-4f0a-bedb-b22b964d3eaf",
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b9814e27-d302-4b1a-a82f-38a2a52bcb8c",
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "a104d726-806d-4427-ba3d-0a91cc2cccfe",
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "60e3a4d4-6f32-4025-bffe-041b895f0c97",
- "metadata": {},
- "outputs": [],
- "source": [
- "\n",
- "# for x in SyftObjectRegistry.__object_version_registry__.keys():\n",
- "#     print(x)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "25c3b6b3-624b-40c9-9fd8-f24ef003274c",
- "metadata": {},
- "outputs": [],
- "source": [
- "sy.__version__"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- 
"id": "43da857d-1054-4302-9656-2a51fadb9d00", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b1154e9f-3002-4e8f-a5f9-d097423d5702", - "metadata": {}, - "outputs": [], - "source": [ - "with open(\"log086\", \"rb\") as f:\n", - " bts = f.read()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "85f57681-b046-470a-a266-d4819c47f295", - "metadata": {}, - "outputs": [ - { - "ename": "ValidationError", - "evalue": "1 validation error for SyftLog\njob_id\n Field required [type=missing, input_value={'id': 1\u001b[0m log \u001b[38;5;241m=\u001b[39m \u001b[43msy\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdeserialize\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbts\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfrom_bytes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/serde/deserialize.py:29\u001b[0m, in \u001b[0;36m_deserialize\u001b[0;34m(blob, from_proto, from_bytes)\u001b[0m\n\u001b[1;32m 26\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mWrong deserialization format.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 28\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m from_bytes:\n\u001b[0;32m---> 29\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mrs_bytes2object\u001b[49m\u001b[43m(\u001b[49m\u001b[43mblob\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 31\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m from_proto:\n\u001b[1;32m 32\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m rs_proto2object(blob)\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/serde/recursive.py:291\u001b[0m, in \u001b[0;36mrs_bytes2object\u001b[0;34m(blob)\u001b[0m\n\u001b[1;32m 286\u001b[0m MAX_TRAVERSAL_LIMIT \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m2\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m64\u001b[39m \u001b[38;5;241m-\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m 288\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m recursive_scheme\u001b[38;5;241m.\u001b[39mfrom_bytes(\n\u001b[1;32m 289\u001b[0m blob, traversal_limit_in_words\u001b[38;5;241m=\u001b[39mMAX_TRAVERSAL_LIMIT\n\u001b[1;32m 290\u001b[0m ) \u001b[38;5;28;01mas\u001b[39;00m msg:\n\u001b[0;32m--> 291\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mrs_proto2object\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmsg\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/serde/recursive.py:382\u001b[0m, in \u001b[0;36mrs_proto2object\u001b[0;34m(proto)\u001b[0m\n\u001b[1;32m 380\u001b[0m \u001b[38;5;28msetattr\u001b[39m(obj, attr_name, attr_value)\n\u001b[1;32m 381\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 382\u001b[0m obj \u001b[38;5;241m=\u001b[39m \u001b[43mclass_type\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 384\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 385\u001b[0m obj \u001b[38;5;241m=\u001b[39m class_type\u001b[38;5;241m.\u001b[39m\u001b[38;5;21m__new__\u001b[39m(class_type) \u001b[38;5;66;03m# type: ignore\u001b[39;00m\n", - "File \u001b[0;32m~/workspace/PySyft/packages/syft/src/syft/types/syft_object.py:590\u001b[0m, in \u001b[0;36mSyftObject.__init__\u001b[0;34m(self, 
**kwargs)\u001b[0m\n\u001b[1;32m 589\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__init__\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 590\u001b[0m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__init__\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 591\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_syft_set_validate_private_attrs_(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 592\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m__post_init__()\n", - "File \u001b[0;32m/opt/anaconda3/envs/syft/lib/python3.12/site-packages/pydantic/main.py:171\u001b[0m, in \u001b[0;36mBaseModel.__init__\u001b[0;34m(self, **data)\u001b[0m\n\u001b[1;32m 169\u001b[0m \u001b[38;5;66;03m# `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks\u001b[39;00m\n\u001b[1;32m 170\u001b[0m __tracebackhide__ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m--> 171\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__pydantic_validator__\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mvalidate_python\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mself_instance\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\n", - "\u001b[0;31mValidationError\u001b[0m: 1 validation error for SyftLog\njob_id\n Field required [type=missing, input_value={'id': \u001b[0;32m/opt/anaconda3/envs/syft/lib/python3.12/site-packages/pydantic/main.py\u001b[0m(171)\u001b[0;36m__init__\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 169 \u001b[0;31m \u001b[0;31m# `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 170 \u001b[0;31m \u001b[0m__tracebackhide__\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m--> 171 \u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__pydantic_validator__\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalidate_python\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself_instance\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 172 \u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 173 \u001b[0;31m \u001b[0;31m# The following line sets a flag that we use to determine when `__init__` gets overridden by the user\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ipdb> u\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "> \u001b[0;32m/Users/koen/workspace/PySyft/packages/syft/src/syft/types/syft_object.py\u001b[0m(590)\u001b[0;36m__init__\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 588 \u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 589 \u001b[0;31m 
\u001b[0;32mdef\u001b[0m \u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m--> 590 \u001b[0;31m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 591 \u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_syft_set_validate_private_attrs_\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 592 \u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__post_init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ipdb> u\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "> \u001b[0;32m/Users/koen/workspace/PySyft/packages/syft/src/syft/serde/recursive.py\u001b[0m(382)\u001b[0;36mrs_proto2object\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 380 \u001b[0;31m \u001b[0msetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 381 \u001b[0;31m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m--> 382 \u001b[0;31m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 383 \u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 384 \u001b[0;31m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ipdb> ll\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[1;32m 294 \u001b[0m\u001b[0;32mdef\u001b[0m \u001b[0mrs_proto2object\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0m_DynamicStructBuilder\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 295 \u001b[0m \u001b[0;31m# relative\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 296 \u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0mdeserialize\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0m_deserialize\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 297 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 298 \u001b[0m \u001b[0;31m# clean this mess, Tudor\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 299 \u001b[0m \u001b[0mmodule_parts\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\".\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 300 \u001b[0m \u001b[0mklass\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodule_parts\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpop\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 301 \u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mtype\u001b[0m \u001b[0;34m|\u001b[0m \u001b[0mAny\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 302 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 303 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mklass\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;34m\"NoneType\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 304 \u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 305 \u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mindex_syft_by_module_name\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# type: ignore[assignment,unused-ignore]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 306 \u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# nosec\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 307 \u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 308 \u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodules\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\".\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodule_parts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mklass\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 309 \u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# nosec\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 310 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m\"syft.user\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 311 \u001b[0m \u001b[0;31m# relative\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 312 \u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;34m.\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnode\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnode\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mCODE_RELOADER\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 313 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 314 \u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mload_user_code\u001b[0m \u001b[0;32min\u001b[0m 
\u001b[0mCODE_RELOADER\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 315 \u001b[0m \u001b[0mload_user_code\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 316 \u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 317 \u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodules\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\".\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodule_parts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mklass\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 318 \u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# nosec\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 319 \u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 320 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 321 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mTYPE_BANK\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 322 \u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"\u001b[0m\u001b[0;34m{\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m not in TYPE_BANK\u001b[0m\u001b[0;34m\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 323 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 324 \u001b[0m \u001b[0;31m# TODO: 🐉 sort this out, basically sometimes the syft.user classes are not in the\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 325 \u001b[0m \u001b[0;31m# module name space in sub-processes or threads even though they are loaded on start\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 326 \u001b[0m \u001b[0;31m# its possible that the uvicorn awsgi server is preloading a bunch of threads\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 327 \u001b[0m \u001b[0;31m# however simply getting the class from the TYPE_BANK doesn't always work and\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 328 \u001b[0m \u001b[0;31m# causes some errors so it seems like we want to get the local one where possible\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 329 \u001b[0m (\n", - "\u001b[1;32m 330 \u001b[0m \u001b[0mnonrecursive\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 331 \u001b[0m \u001b[0mserialize\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 332 \u001b[0m \u001b[0mdeserialize\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 333 \u001b[0m 
\u001b[0mattribute_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 334 \u001b[0m \u001b[0mexclude_attrs_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 335 \u001b[0m \u001b[0mserde_overrides\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 336 \u001b[0m \u001b[0mhash_exclude_attrs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 337 \u001b[0m \u001b[0mcls\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 338 \u001b[0m \u001b[0mattribute_types\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 339 \u001b[0m \u001b[0mversion\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 340 \u001b[0m \u001b[0;34m)\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mTYPE_BANK\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 341 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 342 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 343 \u001b[0m \u001b[0;31m# yes this looks stupid but it works and the opposite breaks\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 344 \u001b[0m \u001b[0mclass_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcls\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 345 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 346 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mnonrecursive\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 347 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mdeserialize\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 348 \u001b[0m raise Exception(\n", - "\u001b[1;32m 349 \u001b[0m \u001b[0;34mf\"\u001b[0m\u001b[0;34mCant serialize \u001b[0m\u001b[0;34m{\u001b[0m\u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m nonrecursive without serialize.\u001b[0m\u001b[0;34m\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 350 \u001b[0m \u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 351 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 352 \u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mdeserialize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcombine_bytes\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnonrecursiveBlob\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 353 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 354 \u001b[0m \u001b[0mkwargs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 355 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 
- "\u001b[1;32m 356 \u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_bytes_list\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfieldsName\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfieldsData\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 357 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mattr_name\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;34m\"\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 358 \u001b[0m \u001b[0mattr_bytes\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcombine_bytes\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_bytes_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 359 \u001b[0m \u001b[0mattr_value\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_deserialize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_bytes\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfrom_bytes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 360 \u001b[0m \u001b[0mtransforms\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mserde_overrides\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 361 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 362 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mtransforms\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 363 \u001b[0m \u001b[0mattr_value\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtransforms\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 364 \u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mattr_name\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mattr_value\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 365 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 366 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"serde_constructor\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 367 \u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mserde_constructor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 368 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 369 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0missubclass\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mEnum\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0;34m\"value\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 370 \u001b[0m 
\u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__new__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"value\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 371 \u001b[0m \u001b[0;32melif\u001b[0m \u001b[0missubclass\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mBaseModel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 372 \u001b[0m \u001b[0;31m# if we skip the __new__ flow of BaseModel we get the error\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 373 \u001b[0m \u001b[0;31m# AttributeError: object has no attribute '__fields_set__'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 374 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 375 \u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m\"syft.user\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mproto\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfullyQualifiedName\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 376 \u001b[0m \u001b[0;31m# weird issues with pydantic and ForwardRef on user classes being inited\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 377 \u001b[0m \u001b[0;31m# with custom state args / kwargs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 378 \u001b[0m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 379 \u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 380 \u001b[0m \u001b[0msetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 381 \u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m--> 382 \u001b[0;31m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[1;32m 383 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 384 \u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 385 \u001b[0m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclass_type\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__new__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclass_type\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# type: ignore\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 386 \u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m \u001b[0;32min\u001b[0m 
\u001b[0mkwargs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 387 \u001b[0m \u001b[0msetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 388 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 389 \u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mobj\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[1;32m 390 \u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ipdb> klass\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "'SyftLog'\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ipdb> proto.fullyQualifiedName\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "'syft.service.log.log.SyftLog'\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ipdb> class_type\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ipdb> index_syft_by_module_name(proto.fullyQualifiedName)\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "ipdb> q\n" - ] - } - ], - "source": [ - "%debug" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9af1f589-6df1-4b5e-9318-13d3c6273863", - "metadata": {}, - "outputs": [], - "source": [ - "import syft as syft" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6343e392-0b74-420c-b80a-9a940dd1e169", - "metadata": {}, - "outputs": [], - "source": [ - "import sys" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f17e274f-acd5-419f-8f78-3ae8a10ed0c4", - "metadata": {}, - "outputs": [], - "source": [ - "from syft.serde.recursive import TYPE_BANK" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "53874015-5133-41eb-a158-afef9f64dc50", - "metadata": {}, - "outputs": [], - "source": [ - "# TYPE_BANK[\"syft.service.log.log.SyftLog\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a6731ef4-a334-4d9c-9d44-7c3dd48ce442", - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Type" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "606f90c5-5b53-4b76-b4dc-5ef6e7635dce", - "metadata": {}, - "outputs": [], - "source": [ - "class SyftClassRegistry():\n", - " __class_registry__: dict[tuple, Type] = {}\n", - " is_built = False\n", - "\n", - " @classmethod\n", - " def build():\n", - " for path, v in TYPE_BANK:\n", - " (nonrecursive,\n", - " serialize,\n", - " deserialize,\n", - " attribute_list,\n", - " exclude_attrs_list,\n", - " serde_overrides,\n", - " hash_exclude_attrs,\n", - " cls,\n", - " attribute_types,\n", - " version) = v\n", - " cls.__class_registry__[cls.__canonical_name__, cls.__version__] = cls\n", - " cls.is_build=True\n", - " \n", - " @classmethod\n", - " def get(cls, cannonical_name: str, version: str):\n", - " if not cls.is_built:\n", - " cls.build()\n", - " return cls.__class_registry__[cannonical_name, version]\n", - " \n", - " \n", - " \n", - "\n", - "\n" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "id": "8f08f3aa-d688-4820-93de-f0605d061c39", - "metadata": {}, - "outputs": [], - "source": [ - "(nonrecursive,\n", - "serialize,\n", - "deserialize,\n", - "attribute_list,\n", - "exclude_attrs_list,\n", - "serde_overrides,\n", - "hash_exclude_attrs,\n", - "cls,\n", - "attribute_types,\n", - "version) = TYPE_BANK[\"syft.service.settings.settings.NodeSettingsV2\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0f4bb863-208e-4d85-bfa4-3fd5c4ce9190", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'SyftLog'" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cls.__canonical_name__" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3e7a686c-f847-422d-a4e9-191914e20395", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "syft.service.log.log.SyftLog" - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cls" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bcf6ea01-e9d7-4c45-9197-95d663bc0320", - "metadata": {}, - "outputs": [], - "source": [ - "(nonrecursive,\n", - "serialize,\n", - "deserialize,\n", - "attribute_list,\n", - "exclude_attrs_list,\n", - "serde_overrides,\n", - "hash_exclude_attrs,\n", - "cls,\n", - "attribute_types,\n", - "version) = TYPE_BANK[\"syft.service.log.log.SyftLog\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4423ae98-12d1-44e9-8822-cf446a861ea4", - "metadata": {}, - "outputs": [], - "source": [ - "from syft.protocol.data_protocol import get_data_protocol\n", - "\n", - "dp = get_data_protocol()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "68581aa2-288a-4df0-8280-3a6645966583", - "metadata": {}, - "outputs": [], - "source": [ - "# dp.current" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13372b49-2f1f-4eef-b677-0d3dc0d6aa65", - "metadata": {}, - "outputs": [], - "source": [ - "# dp.protocol_history" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3d99784e-91e7-48c7-b0bc-0e5457932513", - "metadata": {}, - "outputs": [], - "source": [ - "version = \"3\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c0fa6720-a5bf-41bf-b9cc-3913529ff269", - "metadata": {}, - "outputs": [ - { - "ename": "AttributeError", - "evalue": "module 'syft.service.log.log' has no attribute 'SyftLogV1'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[21], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43msys\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodules\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43msyft.service.log.log\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mSyftLogV1\u001b[49m\n", - "\u001b[0;31mAttributeError\u001b[0m: module 'syft.service.log.log' has no attribute 'SyftLogV1'" - ] - } - ], - "source": [ - "sys.modules[\"syft.service.log.log\"].SyftLogV1" - ] - }, - { - "cell_type": "markdown", - "id": "6d26438a-32a0-4e4c-be6e-fd063905db44", - "metadata": {}, - "source": [ - "- problem 1: it indexes classes by their location in the previous version, which may change in the 
current version\n", - "- problem 2: resolving the name of the class to the version is dynamic and has a different meaning accross versions, therefore having just the class name is not enough\n", - "\n", - "Solution: create a `SyftClassRegistry` that follows the same patterns as the `stage_` logic\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c4c32464-1053-43f2-819a-d7c11d0f83c5", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "syft.service.log.log" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2f4b6e5f-1542-44c6-bd2c-de0871b00de1", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "syft.__dict__[\"service\"]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dc2b7e8d-0e8a-4c9b-9259-ad3857c33901", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.8" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/notebooks/experimental/migration/log086 b/notebooks/experimental/migration/log086 deleted file mode 100644 index a8050298c2f4a61b818bd259767e9b1e97867e93..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 800 zcmcIhu}TCn5RJzwC@3n3+<~Qqg#oR^PQ5!~T|BHZofmgusPO?F`|ue;XLUhoI5 z5d`rsEN%S-8{cfQf@mY?z?(P8%)AL1#uy6$X8~J5z=3toli)X?Hd78PH!>?!D0!me z3dguS)G^hp0xe(!Yy)G@uiyuuIl;Q&q^89l-~z z2~Tx3(|bw3w(7mMypo3JD&nX6ZI_|CO@NsCi(ZmTFwJ>xz`;5E@xgib-q=WHna}$r!Mi|b3XtOU16*M From 0ac566cfcd37606a1e75bfe88e1d0a97551131f6 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Fri, 14 Jun 2024 17:29:36 +0200 Subject: [PATCH 148/313] add new type filters, fix widget checkbox naming --- packages/syft/src/syft/client/syncing.py | 23 +++++-- .../syft/src/syft/service/sync/diff_state.py | 61 +++++++++++-------- .../src/syft/service/sync/resolve_widget.py | 18 +++--- 3 files changed, 65 insertions(+), 37 deletions(-) diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index 9f2cfafd85e..103d1434126 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -1,5 +1,8 @@ # stdlib +# stdlib +from collections.abc import Sequence + # relative from ..abstract_node import NodeSideType from ..node.credentials import SyftVerifyKey @@ -24,7 +27,9 @@ def compare_states( include_ignored: bool = False, include_same: bool = False, filter_by_email: str | None = None, - filter_by_type: str | type | None = None, + include_types: Sequence[str | type] | None = None, + exclude_types: Sequence[str | type] | None = None, + _hide_usercode: bool = True, ) -> NodeDiff | SyftError: # NodeDiff if ( @@ -45,6 +50,11 @@ def compare_states( return SyftError( "Invalid node side types: can only compare a high and low node" ) + + if _hide_usercode: + exclude_types = exclude_types or [] + exclude_types.append("usercode") + return NodeDiff.from_sync_state( low_state=low_state, high_state=high_state, @@ -52,7 +62,8 
@@ def compare_states(
         include_ignored=include_ignored,
         include_same=include_same,
         filter_by_email=filter_by_email,
-        filter_by_type=filter_by_type,
+        include_types=include_types,
+        exclude_types=exclude_types,
     )


@@ -62,7 +73,9 @@ def compare_clients(
     include_ignored: bool = False,
     include_same: bool = False,
     filter_by_email: str | None = None,
-    filter_by_type: type | None = None,
+    include_types: Sequence[str | type] | None = None,
+    exclude_types: Sequence[str | type] | None = None,
+    _hide_usercode: bool = True,
 ) -> NodeDiff | SyftError:
     from_state = from_client.get_sync_state()
     if isinstance(from_state, SyftError):
@@ -78,7 +91,9 @@ def compare_clients(
         include_ignored=include_ignored,
         include_same=include_same,
         filter_by_email=filter_by_email,
-        filter_by_type=filter_by_type,
+        include_types=include_types,
+        exclude_types=exclude_types,
+        _hide_usercode=_hide_usercode,
     )


diff --git a/packages/syft/src/syft/service/sync/diff_state.py b/packages/syft/src/syft/service/sync/diff_state.py
index bb3840cb0bf..24d89af2fd6 100644
--- a/packages/syft/src/syft/service/sync/diff_state.py
+++ b/packages/syft/src/syft/service/sync/diff_state.py
@@ -1,5 +1,6 @@
 # stdlib
 from collections.abc import Callable
+from collections.abc import Collection
 from collections.abc import Iterable
 from dataclasses import dataclass
 import enum
@@ -14,7 +15,6 @@
 # third party
 from loguru import logger
 import pandas as pd
-from pydantic import model_validator
 from rich import box
 from rich.console import Console
 from rich.console import Group
@@ -676,7 +676,7 @@ def status(self) -> str:
             return "NEW"

         batch_statuses = [
-            diff.status for diff in self.get_dependents(include_roots=False)
+            diff.status for diff in self.get_dependencies(include_roots=False)
         ]
         if all(status == "SAME" for status in batch_statuses):
             return "SAME"
@@ -765,6 +765,7 @@ def from_dependencies(
         cls,
         root_uid: UID,
         obj_dependencies: dict[UID, list[UID]],
+        obj_dependents: dict[UID, list[UID]],
         obj_uid_to_diff: dict[UID, ObjectDiff],
         root_ids: list[UID],
         low_node_uid: UID,
@@ -809,15 +810,13 @@ def _build_hierarchy_helper(
         levels = [level for _, level in batch_uids]
         batch_uids = {uid for uid, _ in batch_uids}  # type: ignore

-        batch_dependencies = {
-            uid: [d for d in obj_dependencies.get(uid, []) if d in batch_uids]
-            for uid in batch_uids
-        }
+
         return cls(
             global_diffs=obj_uid_to_diff,
             global_roots=root_ids,
             hierarchy_levels=levels,
             dependencies=obj_dependencies,
+            dependents=obj_dependents,
             root_diff=obj_uid_to_diff[root_uid],
             low_node_uid=low_node_uid,
             high_node_uid=high_node_uid,
@@ -910,15 +909,6 @@ def visual_hierarchy(self) -> tuple[type, dict]:
         else:
             raise ValueError(f"Unknown root type: {self.root.obj_type}")

-    @model_validator(mode="after")
-    def make_dependents(self) -> Self:
-        dependents: dict = {}
-        for parent, children in self.dependencies.items():
-            for child in children:
-                dependents[child] = dependents.get(child, []) + [parent]
-        self.dependents = dependents
-        return self
-
     @property
     def root(self) -> ObjectDiff:
         return self.root_diff
@@ -1195,7 +1185,8 @@ def from_sync_state(
         include_ignored: bool = False,
         include_same: bool = False,
         filter_by_email: str | None = None,
-        filter_by_type: type | None = None,
+        include_types: Collection[type | str] | None = None,
+        exclude_types: Collection[type | str] | None = None,
         _include_node_status: bool = False,
     ) -> "NodeDiff":
         obj_uid_to_diff = {}
@@ -1235,8 +1226,9 @@ def from_sync_state(
             )
             obj_uid_to_diff[diff.object_id] = diff

+        # TODO move static methods to NodeDiff __init__
         obj_dependencies = NodeDiff.dependencies_from_states(low_state, high_state)
-        all_batches = NodeDiff.hierarchies(
+        all_batches = NodeDiff._create_batches(
             low_state,
             high_state,
             obj_dependencies,
@@ -1265,9 +1257,10 @@ def from_sync_state(

         res._filter(
             user_email=filter_by_email,
-            obj_type=filter_by_type,
+            include_types=include_types,
             include_ignored=include_ignored,
             include_same=include_same,
+            exclude_types=exclude_types,
             inplace=True,
         )

@@ -1400,7 +1393,7 @@ def _sort_batches(hierarchies: list[ObjectDiffBatch]) -> list[ObjectDiffBatch]:
         return sorted_hierarchies

     @staticmethod
-    def hierarchies(
+    def _create_batches(
         low_sync_state: SyncState,
         high_sync_state: SyncState,
         obj_dependencies: dict[UID, list[UID]],
@@ -1424,10 +1417,17 @@ def hierarchies(
         ):
             root_ids.append(diff.object_id)  # type: ignore

+        # Dependents are the reverse edges of the dependency graph
+        obj_dependents = {}
+        for parent, children in obj_dependencies.items():
+            for child in children:
+                obj_dependents[child] = obj_dependents.get(child, []) + [parent]
+
         for root_uid in root_ids:
             batch = ObjectDiffBatch.from_dependencies(
                 root_uid,
                 obj_dependencies,
+                obj_dependents,
                 obj_uid_to_diff,
                 root_ids,
                 low_sync_state.node_uid,
@@ -1483,9 +1483,10 @@ def _apply_filters(
     def _filter(
         self,
         user_email: str | None = None,
-        obj_type: str | type | None = None,
         include_ignored: bool = False,
         include_same: bool = False,
+        include_types: Collection[str | type] | None = None,
+        exclude_types: Collection[type | str] | None = None,
         inplace: bool = True,
     ) -> Self:
         new_filters = []
@@ -1493,12 +1494,6 @@ def _filter(
             new_filters.append(
                 NodeDiffFilter(FilterProperty.USER, user_email, operator.eq)
             )
-        if obj_type is not None:
-            if isinstance(obj_type, type):
-                obj_type = obj_type.__name__
-            new_filters.append(
-                NodeDiffFilter(FilterProperty.TYPE, obj_type, operator.eq)
-            )
         if not include_ignored:
             new_filters.append(
                 NodeDiffFilter(FilterProperty.IGNORED, True, operator.ne)
@@ -1507,6 +1502,20 @@ def _filter(
             new_filters.append(
                 NodeDiffFilter(FilterProperty.STATUS, "SAME", operator.ne)
             )
+        if include_types is not None:
+            include_types_ = {
+                t.__name__ if isinstance(t, type) else t for t in include_types
+            }
+            new_filters.append(
+                NodeDiffFilter(FilterProperty.TYPE, include_types_, operator.contains)
+            )
+        if exclude_types:
+            for exclude_type in exclude_types:
+                if isinstance(exclude_type, type):
+                    exclude_type = exclude_type.__name__
+                new_filters.append(
+                    NodeDiffFilter(FilterProperty.TYPE, exclude_type, operator.ne)
+                )

         return self._apply_filters(new_filters, inplace=inplace)

diff --git a/packages/syft/src/syft/service/sync/resolve_widget.py b/packages/syft/src/syft/service/sync/resolve_widget.py
index b9dadcb319e..09f35d53fcc 100644
--- a/packages/syft/src/syft/service/sync/resolve_widget.py
+++ b/packages/syft/src/syft/service/sync/resolve_widget.py
@@ -331,7 +331,7 @@ def create_accordion_css(

     def build_accordion(
         self,
-        accordion_body: widgets.Widget,
+        accordion_body: MainObjectDiffWidget,
         show_sync_checkbox: bool = True,
         show_share_private_checkbox: bool = True,
     ) -> VBox:
@@ -368,8 +368,12 @@ def build_accordion(
             layout=Layout(flex="1"),
         )

+        if isinstance(self.diff.non_empty_object, ActionObject):
+            share_data_description = "Share real data and approve"
+        else:
+            share_data_description = "Share real data"
         share_private_data_checkbox = Checkbox(
-            description="Sync Real Data",
+            description=share_data_description,
             layout=Layout(width="auto", margin="0 2px 0 0"),
         )
         sync_checkbox = Checkbox(
@@ -485,20 +489,20 @@ def batch_diff_widgets(self) -> list[CollapsableObjectDiffWidget]:
         return dependent_diff_widgets

     @property
-    def dependent_batch_diff_widgets(self) -> list[CollapsableObjectDiffWidget]:
+    def dependent_root_diff_widgets(self) -> list[CollapsableObjectDiffWidget]:
         dependencies = self.obj_diff_batch.get_dependencies(
             include_roots=True, include_batch_root=False
         )
         other_roots = [
             d for d in dependencies if d.object_id in self.obj_diff_batch.global_roots
         ]
-        dependent_root_diff_widgets = [
+        widgets = [
             CollapsableObjectDiffWidget(
                 diff, direction=self.obj_diff_batch.sync_direction
             )
             for diff in other_roots
         ]
-        return dependent_root_diff_widgets
+        return widgets

     @property
     def main_object_diff_widget(self) -> MainObjectDiffWidget:
@@ -536,7 +540,7 @@ def build(self) -> VBox:
         self.id2widget = {}

         batch_diff_widgets = self.batch_diff_widgets
-        dependent_batch_diff_widgets = self.dependent_batch_diff_widgets
+        dependent_batch_diff_widgets = self.dependent_root_diff_widgets
         main_object_diff_widget = self.main_object_diff_widget

         self.id2widget[main_object_diff_widget.diff.object_id] = main_object_diff_widget
@@ -572,7 +576,7 @@ def build(self) -> VBox:

     def sync_button(self) -> Button:
         sync_button = Button(
-            description="Sync Selected Changes",
+            description="Apply Selected Changes",
             style={
                 "text_color": "#464A91",
                 "button_color": "transparent",

From 15d95178620c242feb2f34bd328871ef7e00b59d Mon Sep 17 00:00:00 2001
From: teo
Date: Fri, 14 Jun 2024 18:29:50 +0300
Subject: [PATCH 149/313] fix api service

---
 notebooks/api/0.8/12-custom-api-endpoint.ipynb    | 2 +-
 packages/syft/src/syft/service/api/api_service.py | 6 +++++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/notebooks/api/0.8/12-custom-api-endpoint.ipynb b/notebooks/api/0.8/12-custom-api-endpoint.ipynb
index f84ca9c5c3f..20c269ea286 100644
--- a/notebooks/api/0.8/12-custom-api-endpoint.ipynb
+++ b/notebooks/api/0.8/12-custom-api-endpoint.ipynb
@@ -574,7 +574,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.11.0rc1"
+   "version": "3.11.8"
   }
  },
 "nbformat": 4,
diff --git a/packages/syft/src/syft/service/api/api_service.py b/packages/syft/src/syft/service/api/api_service.py
index 4084814001c..8ab77ad2667 100644
--- a/packages/syft/src/syft/service/api/api_service.py
+++ b/packages/syft/src/syft/service/api/api_service.py
@@ -91,7 +91,11 @@ def set(
             syft_client_verify_key=context.credentials,
         )
         action_service = context.node.get_service("actionservice")
-        res = action_service.set(context=context, action_object=action_obj)
+        res = action_service.set_result_to_store(
+            context=context,
+            result_action_object=action_obj,
+            has_result_read_permission=True,
+        )

         if res.is_err():
             return SyftError(message=res.err())

From 5ab0feeddff52c9066523b640cc475edd43520f6 Mon Sep 17 00:00:00 2001
From: Koen van der Veen
Date: Fri, 14 Jun 2024 18:27:55 +0200
Subject: [PATCH 150/313] fix bugs

---
 packages/syft/src/syft/client/domain_client.py     | 14 ++++++++++++--
 .../syft/src/syft/service/action/action_object.py  | 11 ++++++-----
 .../src/syft/service/action/action_service.py      |  8 ++++++--
 packages/syft/src/syft/service/api/api_service.py  |  2 +-
 packages/syft/src/syft/service/dataset/dataset.py  | 15 ++++++++++++++-
 .../src/syft/service/enclave/enclave_service.py    |  4 ++--
 packages/syft/src/syft/service/queue/zmq_queue.py  |  2 +-
 packages/syft/src/syft/types/twin_object.py        |  6 +++---
 .../syft/request/request_code_accept_deny_test.py  |  4 ++--
 9 files changed, 47 insertions(+), 19 deletions(-)

diff --git 
a/packages/syft/src/syft/client/domain_client.py b/packages/syft/src/syft/client/domain_client.py index 1c768710040..5210ec71bfb 100644 --- a/packages/syft/src/syft/client/domain_client.py +++ b/packages/syft/src/syft/client/domain_client.py @@ -128,18 +128,28 @@ def upload_dataset(self, dataset: CreateDataset) -> SyftSuccess | SyftError: ) as pbar: for asset in dataset.asset_list: try: + contains_empty = asset.contains_empty() twin = TwinObject( private_obj=asset.data, mock_obj=asset.mock, syft_node_location=self.id, syft_client_verify_key=self.verify_key, ) - twin._save_to_blob_storage() + res = twin._save_to_blob_storage(allow_empty=contains_empty) + if isinstance(res, SyftError): + return res except Exception as e: + # stdlib + import traceback + + print(traceback.format_exc()) tqdm.write(f"Failed to create twin for {asset.name}. {e}") return SyftError(message=f"Failed to create twin. {e}") - response = self.api.services.action.set(twin) + response = self.api.services.action.set( + twin, ignore_detached_objs=contains_empty + ) + print(response) if isinstance(response, SyftError): tqdm.write(f"Failed to upload asset: {asset.name}") return response diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 2039513affc..dace2436071 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -752,7 +752,9 @@ def _set_obj_location_(self, node_uid: UID, credentials: SyftVerifyKey) -> None: @property def syft_action_data(self) -> Any: if self.syft_blob_storage_entry_id and self.syft_created_at: - self.reload_cache() + res = self.reload_cache() + if isinstance(res, SyftError): + print(res) return self.syft_action_data_cache @@ -793,8 +795,7 @@ def reload_cache(self) -> SyftError | None: self.syft_action_data_type = type(self.syft_action_data) return None else: - print("cannot reload cache") - return None + return SyftError("Could not reload cache, could not get read method") return None @@ -861,11 +862,11 @@ def _set_reprs(self, data: any) -> None: ) self.syft_action_data_str_ = truncate_str(str(data)) - def _save_to_blob_storage(self) -> SyftError | None: + def _save_to_blob_storage(self, allow_empty: bool = False) -> SyftError | None: data = self.syft_action_data if isinstance(data, SyftError): return data - if isinstance(data, ActionDataEmpty): + if isinstance(data, ActionDataEmpty) and not allow_empty: return SyftError(message=f"cannot store empty object {self.id}") result = self._save_to_blob_storage_(data) if isinstance(result, SyftError): diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 251282ba019..08e8119ee22 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -83,14 +83,18 @@ def set( action_object: ActionObject | TwinObject, add_storage_permission: bool = True, ignore_detached_objs: bool = False, - ) -> Result[ActionObject, str]: - return self._set( + ) -> ActionObject | SyftError: + res = self._set( context, action_object, has_result_read_permission=True, add_storage_permission=add_storage_permission, ignore_detached_objs=ignore_detached_objs, ) + if res.is_err(): + return SyftError(message=res.value) + else: + return res.ok() def is_detached_obj( self, diff --git a/packages/syft/src/syft/service/api/api_service.py b/packages/syft/src/syft/service/api/api_service.py index 
4084814001c..134c1c93f78 100644 --- a/packages/syft/src/syft/service/api/api_service.py +++ b/packages/syft/src/syft/service/api/api_service.py @@ -91,7 +91,7 @@ def set( syft_client_verify_key=context.credentials, ) action_service = context.node.get_service("actionservice") - res = action_service.set(context=context, action_object=action_obj) + res = action_service._set(context=context, action_object=action_obj) if res.is_err(): return SyftError(message=res.err()) diff --git a/packages/syft/src/syft/service/dataset/dataset.py b/packages/syft/src/syft/service/dataset/dataset.py index 8bbcf83cf2d..0b9ee62cabc 100644 --- a/packages/syft/src/syft/service/dataset/dataset.py +++ b/packages/syft/src/syft/service/dataset/dataset.py @@ -38,6 +38,8 @@ from ...util.notebook_ui.icons import Icon from ...util.notebook_ui.styles import FONT_CSS from ...util.notebook_ui.styles import ITABLES_CSS +from ..action.action_data_empty import ActionDataEmpty +from ..action.action_object import ActionObject from ..data_subject.data_subject import DataSubject from ..data_subject.data_subject import DataSubjectCreate from ..data_subject.data_subject_service import DataSubjectService @@ -320,6 +322,17 @@ def __mock_is_real_for_empty_mock_must_be_false(self) -> Self: return self + def contains_empty(self) -> bool: + if isinstance(self.mock, ActionObject) and isinstance( + self.mock.syft_action_data_cache, ActionDataEmpty + ): + return True + if isinstance(self.data, ActionObject) and isinstance( + self.data.syft_action_data_cache, ActionDataEmpty + ): + return True + return False + def add_data_subject(self, data_subject: DataSubject) -> None: self.data_subjects.append(data_subject) @@ -694,7 +707,7 @@ def create_and_store_twin(context: TransformContext) -> TransformContext: "f{context}'s node is None, please log in. 
No trasformation happened" ) action_service = context.node.get_service("actionservice") - result = action_service.set( + result = action_service._set( context=context.to_node_context(), action_object=twin ) if result.is_err(): diff --git a/packages/syft/src/syft/service/enclave/enclave_service.py b/packages/syft/src/syft/service/enclave/enclave_service.py index be19b9c2659..03edc7af6a7 100644 --- a/packages/syft/src/syft/service/enclave/enclave_service.py +++ b/packages/syft/src/syft/service/enclave/enclave_service.py @@ -78,14 +78,14 @@ def send_user_code_inputs_to_enclave( root_context.extra_kwargs = {"has_result_read_permission": True} # TODO: Instead of using the action store, modify to # use the action service directly to store objects - action_service.set(root_context, dict_object) + action_service._set(root_context, dict_object) else: res = action_service.get(uid=user_code_id, context=root_context) if res.is_ok(): dict_object = res.ok() dict_object[str(context.credentials)] = inputs - action_service.set(root_context, dict_object) + action_service._set(root_context, dict_object) else: return SyftError( message=f"Error while fetching the object on Enclave: {res.err()}" diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index decc3fd2e79..c19c89b5421 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -305,7 +305,7 @@ def preprocess_action_arg(self, arg: UID) -> UID | None: id=action_object.id, syft_blob_storage_entry_id=action_object.syft_blob_storage_entry_id, ) - res = self.action_service.set( + res = self.action_service._set( context=self.auth_context, action_object=new_action_object ) return None diff --git a/packages/syft/src/syft/types/twin_object.py b/packages/syft/src/syft/types/twin_object.py index 3bfd47c3d71..f94d75744d6 100644 --- a/packages/syft/src/syft/types/twin_object.py +++ b/packages/syft/src/syft/types/twin_object.py @@ -82,7 +82,7 @@ def mock(self) -> ActionObject: mock.id = twin_id return mock - def _save_to_blob_storage(self) -> SyftError | None: + def _save_to_blob_storage(self, allow_empty: bool = False) -> SyftError | None: # Set node location and verify key self.private_obj._set_obj_location_( self.syft_node_location, @@ -92,10 +92,10 @@ def _save_to_blob_storage(self) -> SyftError | None: self.syft_node_location, self.syft_client_verify_key, ) - mock_store_res = self.mock_obj._save_to_blob_storage() + mock_store_res = self.mock_obj._save_to_blob_storage(allow_empty=allow_empty) if isinstance(mock_store_res, SyftError): return mock_store_res - return self.private_obj._save_to_blob_storage() + return self.private_obj._save_to_blob_storage(allow_empty=allow_empty) def send(self, client: SyftClient, add_storage_permission: bool = True) -> Any: self._set_obj_location_(client.id, client.verify_key) diff --git a/packages/syft/tests/syft/request/request_code_accept_deny_test.py b/packages/syft/tests/syft/request/request_code_accept_deny_test.py index 9e63ea592a8..4a14db08248 100644 --- a/packages/syft/tests/syft/request/request_code_accept_deny_test.py +++ b/packages/syft/tests/syft/request/request_code_accept_deny_test.py @@ -120,7 +120,7 @@ def test_user_code_status_change(faker: Faker, worker: Worker): root_client = worker.root_client dummy_data = [1, 2, 3] data = ActionObject.from_obj(dummy_data) - action_obj = root_client.api.services.action.set(data) + action_obj = data.send(root_client) ds_client = get_ds_client(faker, 
root_client, worker.guest_client) @@ -168,7 +168,7 @@ def test_code_accept_deny(faker: Faker, worker: Worker): root_client = worker.root_client dummy_data = [1, 2, 3] data = ActionObject.from_obj(dummy_data) - action_obj = root_client.api.services.action.set(data) + action_obj = data.send(root_client) ds_client = get_ds_client(faker, root_client, worker.guest_client) From 24aaf298e1d129f35508e05b8455f032ae65681a Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Sat, 15 Jun 2024 15:15:04 +0200 Subject: [PATCH 151/313] fix tests --- packages/syft/src/syft/client/domain_client.py | 5 ----- .../src/syft/service/action/action_object.py | 1 - .../syft/src/syft/service/dataset/dataset.py | 16 +++++++++++++--- packages/syft/src/syft/service/queue/queue.py | 7 ------- .../syft/src/syft/service/queue/zmq_queue.py | 2 -- packages/syft/tests/syft/dataset/fixtures.py | 2 ++ 6 files changed, 15 insertions(+), 18 deletions(-) diff --git a/packages/syft/src/syft/client/domain_client.py b/packages/syft/src/syft/client/domain_client.py index 5210ec71bfb..48442376d9b 100644 --- a/packages/syft/src/syft/client/domain_client.py +++ b/packages/syft/src/syft/client/domain_client.py @@ -139,17 +139,12 @@ def upload_dataset(self, dataset: CreateDataset) -> SyftSuccess | SyftError: if isinstance(res, SyftError): return res except Exception as e: - # stdlib - import traceback - - print(traceback.format_exc()) tqdm.write(f"Failed to create twin for {asset.name}. {e}") return SyftError(message=f"Failed to create twin. {e}") response = self.api.services.action.set( twin, ignore_detached_objs=contains_empty ) - print(response) if isinstance(response, SyftError): tqdm.write(f"Failed to upload asset: {asset.name}") return response diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index dace2436071..cf587cc4872 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -755,7 +755,6 @@ def syft_action_data(self) -> Any: res = self.reload_cache() if isinstance(res, SyftError): print(res) - return self.syft_action_data_cache def reload_cache(self) -> SyftError | None: diff --git a/packages/syft/src/syft/service/dataset/dataset.py b/packages/syft/src/syft/service/dataset/dataset.py index 0b9ee62cabc..9964185ba82 100644 --- a/packages/syft/src/syft/service/dataset/dataset.py +++ b/packages/syft/src/syft/service/dataset/dataset.py @@ -698,17 +698,27 @@ def create_and_store_twin(context: TransformContext) -> TransformContext: if private_obj is None and mock_obj is None: raise ValueError("No data and no action_id means this asset has no data") + asset = context.obj + contains_empty = asset.contains_empty() twin = TwinObject( - private_obj=private_obj, - mock_obj=mock_obj, + private_obj=asset.data, + mock_obj=asset.mock, + syft_node_location=asset.syft_node_location, + syft_client_verify_key=asset.syft_client_verify_key, ) + res = twin._save_to_blob_storage(allow_empty=contains_empty) + if isinstance(res, SyftError): + raise ValueError(res.message) + + # TODO, upload to blob storage here if context.node is None: raise ValueError( "f{context}'s node is None, please log in. No trasformation happened" ) action_service = context.node.get_service("actionservice") result = action_service._set( - context=context.to_node_context(), action_object=twin + context=context.to_node_context(), + action_object=twin, ) if result.is_err(): raise RuntimeError(f"Failed to create and store twin. 
Error: {result}") diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index 1c6cb8467ec..968e4b7c975 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -203,13 +203,6 @@ def handle_message_multiprocessing( context=context, user_verify_key=credentials, ) - # import syft as sy - # res = [(x, x.syft_blob_storage_entry_id) if isinstance(x, sy.ActionObject) \ - # else (x, x.private.syft_blob_storage_entry_id) - # for x in context.node.action_store.data.values()] - # import sys - # print("KWARGS", kwargs, kwargs["data"]) - # print(res, file=sys.stderr) result: Any = call_method(context, *queue_item.args, **queue_item.kwargs) diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py index c19c89b5421..4559832f199 100644 --- a/packages/syft/src/syft/service/queue/zmq_queue.py +++ b/packages/syft/src/syft/service/queue/zmq_queue.py @@ -255,8 +255,6 @@ def unwrap_nested_actionobjects(self, data: Any) -> Any: raise ValueError( "More than double nesting of ActionObjects is currently not supported" ) - # nested_res.syft_node_location = res.syft_node_location - # nested_res.syft_client_verify_key = res.syft_client_verify_key return nested_res return data diff --git a/packages/syft/tests/syft/dataset/fixtures.py b/packages/syft/tests/syft/dataset/fixtures.py index 7d92e1104bd..ca57f7e9220 100644 --- a/packages/syft/tests/syft/dataset/fixtures.py +++ b/packages/syft/tests/syft/dataset/fixtures.py @@ -47,6 +47,8 @@ def mock_asset(worker, root_domain_client) -> Asset: node_uid=worker.id, uploader=uploader, contributors=[uploader], + syft_node_location=worker.id, + syft_client_verify_key=root_domain_client.credentials.verify_key, ) node_transform_context = TransformContext( node=worker, From 2fddcde528efc62556504ebdc7f72f21786eddc0 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Sat, 15 Jun 2024 15:37:48 +0200 Subject: [PATCH 152/313] mypy --- packages/syft/src/syft/service/dataset/dataset.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/dataset/dataset.py b/packages/syft/src/syft/service/dataset/dataset.py index 9964185ba82..f482197f348 100644 --- a/packages/syft/src/syft/service/dataset/dataset.py +++ b/packages/syft/src/syft/service/dataset/dataset.py @@ -4,6 +4,7 @@ from enum import Enum import textwrap from typing import Any +from typing import cast # third party from IPython.display import display @@ -698,7 +699,7 @@ def create_and_store_twin(context: TransformContext) -> TransformContext: if private_obj is None and mock_obj is None: raise ValueError("No data and no action_id means this asset has no data") - asset = context.obj + asset = cast(context.obj, CreateAsset) contains_empty = asset.contains_empty() twin = TwinObject( private_obj=asset.data, From f10a9f3c1a94f0a307948ef9705600d53d8f66b4 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Sat, 15 Jun 2024 16:21:59 +0200 Subject: [PATCH 153/313] lintlintlint --- packages/syft/src/syft/service/dataset/dataset.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/service/dataset/dataset.py b/packages/syft/src/syft/service/dataset/dataset.py index f482197f348..9dde84429c4 100644 --- a/packages/syft/src/syft/service/dataset/dataset.py +++ b/packages/syft/src/syft/service/dataset/dataset.py @@ -4,7 +4,6 @@ from enum import Enum import textwrap from typing import Any 
-from typing import cast # third party from IPython.display import display @@ -699,13 +698,13 @@ def create_and_store_twin(context: TransformContext) -> TransformContext: if private_obj is None and mock_obj is None: raise ValueError("No data and no action_id means this asset has no data") - asset = cast(context.obj, CreateAsset) - contains_empty = asset.contains_empty() + asset = context.obj # type: ignore + contains_empty = asset.contains_empty() # type: ignore twin = TwinObject( - private_obj=asset.data, - mock_obj=asset.mock, - syft_node_location=asset.syft_node_location, - syft_client_verify_key=asset.syft_client_verify_key, + private_obj=asset.data, # type: ignore + mock_obj=asset.mock, # type: ignore + syft_node_location=asset.syft_node_location, # type: ignore + syft_client_verify_key=asset.syft_client_verify_key, # type: ignore ) res = twin._save_to_blob_storage(allow_empty=contains_empty) if isinstance(res, SyftError): From 2876fd85d1d8ef3604098f8e90996b8bb7686aa6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jun 2024 02:30:59 +0000 Subject: [PATCH 154/313] Bump crazy-max/ghaction-setup-docker from 3.2.0 to 3.3.0 Bumps [crazy-max/ghaction-setup-docker](https://github.com/crazy-max/ghaction-setup-docker) from 3.2.0 to 3.3.0. - [Release notes](https://github.com/crazy-max/ghaction-setup-docker/releases) - [Commits](https://github.com/crazy-max/ghaction-setup-docker/compare/v3.2.0...v3.3.0) --- updated-dependencies: - dependency-name: crazy-max/ghaction-setup-docker dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/pr-tests-frontend.yml | 2 +- .github/workflows/pr-tests-syft.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index bf36991a385..ae5669e4dc9 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -67,7 +67,7 @@ jobs: - name: Docker on MacOS if: steps.changes.outputs.frontend == 'true' && matrix.os == 'macos-latest' - uses: crazy-max/ghaction-setup-docker@v3.2.0 + uses: crazy-max/ghaction-setup-docker@v3.3.0 - name: Install Tox if: steps.changes.outputs.frontend == 'true' diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index 046dea143e0..425e3468438 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -86,7 +86,7 @@ jobs: # - name: Docker on MacOS # if: steps.changes.outputs.syft == 'true' && matrix.os == 'macos-latest' - # uses: crazy-max/ghaction-setup-docker@v3.2.0 + # uses: crazy-max/ghaction-setup-docker@v3.3.0 # with: # set-host: true @@ -278,7 +278,7 @@ jobs: - name: Docker on MacOS if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'macos-latest' - uses: crazy-max/ghaction-setup-docker@v3.2.0 + uses: crazy-max/ghaction-setup-docker@v3.3.0 - name: Docker Compose on MacOS if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'macos-latest' From e73f2673c4df0c96dfaaa9e219b9188a35c1ba15 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Mon, 17 Jun 2024 14:31:00 +1000 Subject: [PATCH 155/313] Added getting started stubs --- .../getting-started/01-installing-syft.ipynb | 47 ++++++++++++++++ .../02-running-python-server.ipynb | 47 ++++++++++++++++ .../03-configuring-domain.ipynb | 37 
+++++++++++++ .../getting-started/04-uploading-data.ipynb | 48 +++++++++++++++++ .../getting-started/05-adding-users.ipynb | 45 ++++++++++++++++ .../06-working-with-remote-data.ipynb | 53 +++++++++++++++++++ .../07-reviewing-user-code.ipynb | 48 +++++++++++++++++ .../getting-started/08-running-code.ipynb | 48 +++++++++++++++++ .../README.md | 0 .../{sync => reverse-tunnel}/README.md | 0 10 files changed, 373 insertions(+) create mode 100644 notebooks/scenarios/getting-started/01-installing-syft.ipynb create mode 100644 notebooks/scenarios/getting-started/02-running-python-server.ipynb create mode 100644 notebooks/scenarios/getting-started/03-configuring-domain.ipynb create mode 100644 notebooks/scenarios/getting-started/04-uploading-data.ipynb create mode 100644 notebooks/scenarios/getting-started/05-adding-users.ipynb create mode 100644 notebooks/scenarios/getting-started/06-working-with-remote-data.ipynb create mode 100644 notebooks/scenarios/getting-started/07-reviewing-user-code.ipynb create mode 100644 notebooks/scenarios/getting-started/08-running-code.ipynb rename notebooks/scenarios/{reverse_tunnel => getting-started}/README.md (100%) rename notebooks/scenarios/{sync => reverse-tunnel}/README.md (100%) diff --git a/notebooks/scenarios/getting-started/01-installing-syft.ipynb b/notebooks/scenarios/getting-started/01-installing-syft.ipynb new file mode 100644 index 00000000000..1926fbff596 --- /dev/null +++ b/notebooks/scenarios/getting-started/01-installing-syft.ipynb @@ -0,0 +1,47 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "1340105f-d81f-453a-8ef7-6d51453dd3ad", + "metadata": {}, + "outputs": [], + "source": [ + "# -- pip install syft\n", + "# -- conda? (pycapnp lib bug??)\n", + "# -- accessing betas\n", + "# -- checking the version you have installed\n", + "# -- using the same version as the server" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ddb5738-99a2-4d93-9f14-cfa3ac06d3c9", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/getting-started/02-running-python-server.ipynb b/notebooks/scenarios/getting-started/02-running-python-server.ipynb new file mode 100644 index 00000000000..54eca1a1431 --- /dev/null +++ b/notebooks/scenarios/getting-started/02-running-python-server.ipynb @@ -0,0 +1,47 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "5ac9669d-4c1f-4b29-b6fb-abec508649e7", + "metadata": {}, + "outputs": [], + "source": [ + "# -- configuration options for demo / dev mode etc\n", + "# -- how to see the web server is running on port x\n", + "# -- how to make a server accessible to other users (brief explanation of networking)\n", + "# -- Optional: for testing purposes you can use bore: https://github.com/ekzhang/bore for free\n", + "# -- Note: production mode is recommended to use kubernetes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c204ff56-a6c6-4031-9ce2-f6b7a875af20", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 
(ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/getting-started/03-configuring-domain.ipynb b/notebooks/scenarios/getting-started/03-configuring-domain.ipynb new file mode 100644 index 00000000000..b152a87d872 --- /dev/null +++ b/notebooks/scenarios/getting-started/03-configuring-domain.ipynb @@ -0,0 +1,37 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "9278cdde-2ddc-493f-ab1a-7a224f2c2338", + "metadata": {}, + "outputs": [], + "source": [ + "# -- all the configuration and node settings options\n", + "# -- optional: adding email settings (smtp with sendgrid free account)\n", + "# -- changing passwords (importance of root account)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/getting-started/04-uploading-data.ipynb b/notebooks/scenarios/getting-started/04-uploading-data.ipynb new file mode 100644 index 00000000000..1ba607547bb --- /dev/null +++ b/notebooks/scenarios/getting-started/04-uploading-data.ipynb @@ -0,0 +1,48 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "c4db03f1-f663-42e3-b033-09aa817acbbb", + "metadata": {}, + "outputs": [], + "source": [ + "# -- what kinds of data are supported\n", + "# -- how to structure your data\n", + "# -- mock data and how to create some\n", + "# -- how much data you can store (Note: k8s requires blob storage configuration)\n", + "# -- adding metadata and uploading\n", + "# -- how to change the data later" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "150862d6-0395-4fbb-ad19-9bdae91e33fa", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/getting-started/05-adding-users.ipynb b/notebooks/scenarios/getting-started/05-adding-users.ipynb new file mode 100644 index 00000000000..881fb6a5a01 --- /dev/null +++ b/notebooks/scenarios/getting-started/05-adding-users.ipynb @@ -0,0 +1,45 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "e5d3de5f-afee-4743-89c8-8298b12edfe7", + "metadata": {}, + "outputs": [], + "source": [ + "# -- how to enable / disable user registration\n", + "# -- how to create users\n", + "# -- how to reset user passwords" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3ac0c932-601f-4424-ad80-f02da291b2a3", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": 
{ + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/getting-started/06-working-with-remote-data.ipynb b/notebooks/scenarios/getting-started/06-working-with-remote-data.ipynb new file mode 100644 index 00000000000..6b343e978c1 --- /dev/null +++ b/notebooks/scenarios/getting-started/06-working-with-remote-data.ipynb @@ -0,0 +1,53 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "bc1a03b7-05d5-41bb-bc9e-0361769d5c7e", + "metadata": {}, + "outputs": [], + "source": [ + "# -- browsing datasets\n", + "# -- getting a pointer\n", + "# -- mock vs private\n", + "# -- Pointer UIDs\n", + "# -- choosing an input policy\n", + "# -- choosing an output policy\n", + "# -- using the syft function decorator\n", + "# -- testing code locally\n", + "# -- submitting code for approval\n", + "# -- code is denied\n", + "# -- changing code and re-uploading a new version" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37adcb64-bf59-4a3f-9eab-31814fd1c675", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/getting-started/07-reviewing-user-code.ipynb b/notebooks/scenarios/getting-started/07-reviewing-user-code.ipynb new file mode 100644 index 00000000000..a35e4695f42 --- /dev/null +++ b/notebooks/scenarios/getting-started/07-reviewing-user-code.ipynb @@ -0,0 +1,48 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "f9535594-f1a2-48a6-87b1-a608f1fa7520", + "metadata": {}, + "outputs": [], + "source": [ + "# -- notifications of code requests\n", + "# -- requests queue\n", + "# -- reviewing code\n", + "# -- carefully testing code\n", + "# -- approve / deny code\n", + "# -- substituting a result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fa2e6a39-1c10-4eec-bbce-452afde0f10b", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/getting-started/08-running-code.ipynb b/notebooks/scenarios/getting-started/08-running-code.ipynb new file mode 100644 index 00000000000..383f19f4f82 --- /dev/null +++ b/notebooks/scenarios/getting-started/08-running-code.ipynb @@ -0,0 +1,48 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "6f674a77-fcb7-4e99-87a1-daf12094d3aa", + "metadata": {}, + 
"outputs": [], + "source": [ + "# -- executing approved code\n", + "# -- working with jobs\n", + "# -- refreshing\n", + "# -- viewing logs\n", + "# -- getting final result\n", + "# -- success" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "363c56db-4c7c-458d-9a63-108eda0fcef7", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse_tunnel/README.md b/notebooks/scenarios/getting-started/README.md similarity index 100% rename from notebooks/scenarios/reverse_tunnel/README.md rename to notebooks/scenarios/getting-started/README.md diff --git a/notebooks/scenarios/sync/README.md b/notebooks/scenarios/reverse-tunnel/README.md similarity index 100% rename from notebooks/scenarios/sync/README.md rename to notebooks/scenarios/reverse-tunnel/README.md From 87d13464f4ee86683e8568ffe60e04e2408cbe6e Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Mon, 17 Jun 2024 16:02:56 +1000 Subject: [PATCH 156/313] Adding bigquery and reverse-tunnel scenarios --- .../scenarios/bigquery/01-setup-on-gcp.ipynb | 48 +++++++++++++++++++ .../bigquery/02-deploying-to-gke.ipynb | 45 +++++++++++++++++ .../bigquery/03-custom-worker-pool.ipynb | 41 ++++++++++++++++ .../scenarios/bigquery/04-adding-users.ipynb | 37 ++++++++++++++ .../bigquery/05-custom-api-endpoints.ipynb | 45 +++++++++++++++++ ...ng-with-remote-apis-and-worker-pools.ipynb | 42 ++++++++++++++++ .../bigquery/07-reviewing-user-code.ipynb | 46 ++++++++++++++++++ .../scenarios/bigquery/08-running-code.ipynb | 40 ++++++++++++++++ .../01-why-reverse-tunnel.ipynb | 36 ++++++++++++++ .../reverse-tunnel/02-creating-gateway.ipynb | 36 ++++++++++++++ .../03-network-configuration.ipynb | 37 ++++++++++++++ .../04-setup-domain-with-tunnel.ipynb | 36 ++++++++++++++ .../05-connect-to-gateway-over-tunnel.ipynb | 36 ++++++++++++++ .../reverse-tunnel/06-proxy-clients.ipynb | 36 ++++++++++++++ .../07-blob-storage-streaming.ipynb | 35 ++++++++++++++ .../reverse-tunnel/08-debugging-tunnel.ipynb | 38 +++++++++++++++ 16 files changed, 634 insertions(+) create mode 100644 notebooks/scenarios/bigquery/01-setup-on-gcp.ipynb create mode 100644 notebooks/scenarios/bigquery/02-deploying-to-gke.ipynb create mode 100644 notebooks/scenarios/bigquery/03-custom-worker-pool.ipynb create mode 100644 notebooks/scenarios/bigquery/04-adding-users.ipynb create mode 100644 notebooks/scenarios/bigquery/05-custom-api-endpoints.ipynb create mode 100644 notebooks/scenarios/bigquery/06-working-with-remote-apis-and-worker-pools.ipynb create mode 100644 notebooks/scenarios/bigquery/07-reviewing-user-code.ipynb create mode 100644 notebooks/scenarios/bigquery/08-running-code.ipynb create mode 100644 notebooks/scenarios/reverse-tunnel/01-why-reverse-tunnel.ipynb create mode 100644 notebooks/scenarios/reverse-tunnel/02-creating-gateway.ipynb create mode 100644 notebooks/scenarios/reverse-tunnel/03-network-configuration.ipynb create mode 100644 notebooks/scenarios/reverse-tunnel/04-setup-domain-with-tunnel.ipynb create mode 100644 notebooks/scenarios/reverse-tunnel/05-connect-to-gateway-over-tunnel.ipynb create mode 100644 
notebooks/scenarios/reverse-tunnel/06-proxy-clients.ipynb create mode 100644 notebooks/scenarios/reverse-tunnel/07-blob-storage-streaming.ipynb create mode 100644 notebooks/scenarios/reverse-tunnel/08-debugging-tunnel.ipynb diff --git a/notebooks/scenarios/bigquery/01-setup-on-gcp.ipynb b/notebooks/scenarios/bigquery/01-setup-on-gcp.ipynb new file mode 100644 index 00000000000..ee727e8c9cb --- /dev/null +++ b/notebooks/scenarios/bigquery/01-setup-on-gcp.ipynb @@ -0,0 +1,48 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "453d340a-4dac-4407-9003-3e99e8de4aca", + "metadata": {}, + "outputs": [], + "source": [ + "# -- creating a project\n", + "# -- gke\n", + "# -- artifact registry\n", + "# -- gcs bucket\n", + "# -- bigquery instance\n", + "# -- service account" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "78e4a96e-9981-4021-9f08-6a29342755c9", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/bigquery/02-deploying-to-gke.ipynb b/notebooks/scenarios/bigquery/02-deploying-to-gke.ipynb new file mode 100644 index 00000000000..fcdfe9597f7 --- /dev/null +++ b/notebooks/scenarios/bigquery/02-deploying-to-gke.ipynb @@ -0,0 +1,45 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "6e61a7e1-ff80-436e-a7ed-25e88d148e64", + "metadata": {}, + "outputs": [], + "source": [ + "# -- helm install\n", + "# -- configuring seaweed blob storage\n", + "# -- ingress" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7246355d-a13a-429c-88ca-4afe4b6e7a4d", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/bigquery/03-custom-worker-pool.ipynb b/notebooks/scenarios/bigquery/03-custom-worker-pool.ipynb new file mode 100644 index 00000000000..0ff89e8753f --- /dev/null +++ b/notebooks/scenarios/bigquery/03-custom-worker-pool.ipynb @@ -0,0 +1,41 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "786ccd6c-d55a-40e9-8d5f-27deba80332d", + "metadata": {}, + "outputs": [], + "source": [ + "# -- configuring artifact registry\n", + "# -- building a custom image with bigquery and pushing\n", + "# -- adding worker pool\n", + "# -- optional: deploying an image which is pre-built\n", + "# -- optional: adding service account via labels\n", + "# -- setting default worker pool to custom pool\n", + "# -- scaling down `default-worker-pool`" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": 
".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/bigquery/04-adding-users.ipynb b/notebooks/scenarios/bigquery/04-adding-users.ipynb new file mode 100644 index 00000000000..2721865882a --- /dev/null +++ b/notebooks/scenarios/bigquery/04-adding-users.ipynb @@ -0,0 +1,37 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "3ba045e3-68be-4a0e-8e99-7721c66d1999", + "metadata": {}, + "outputs": [], + "source": [ + "# -- how to enable user registration\n", + "# -- how to create users\n", + "# -- how to reset user passwords" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/bigquery/05-custom-api-endpoints.ipynb b/notebooks/scenarios/bigquery/05-custom-api-endpoints.ipynb new file mode 100644 index 00000000000..47ff98c6d80 --- /dev/null +++ b/notebooks/scenarios/bigquery/05-custom-api-endpoints.ipynb @@ -0,0 +1,45 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "248f62a8-94b6-481d-a32b-03d5c2b67ae0", + "metadata": {}, + "outputs": [], + "source": [ + "# -- adding custom endpoints that use custom pools\n", + "# -- mock vs private\n", + "# -- context.node\n", + "# -- settings and state\n", + "# -- service account via settings or k8s labels\n", + "# -- how to clear state\n", + "# -- how to implement a basic rate limiter using context.user\n", + "# -- creating a big query endpoint with a try catch\n", + "# -- how to delete / replace api endpoints\n", + "# -- testing endpoints and their mock / private versions\n", + "# -- executing a large query and checking blob storage" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/bigquery/06-working-with-remote-apis-and-worker-pools.ipynb b/notebooks/scenarios/bigquery/06-working-with-remote-apis-and-worker-pools.ipynb new file mode 100644 index 00000000000..744923e5b68 --- /dev/null +++ b/notebooks/scenarios/bigquery/06-working-with-remote-apis-and-worker-pools.ipynb @@ -0,0 +1,42 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "ddc9b9f5-a236-4ab2-a99d-0a2bd4a28f5f", + "metadata": {}, + "outputs": [], + "source": [ + "# -- browsing api endpoints\n", + "# -- listing worker pools\n", + "# -- mock vs private execution\n", + "# -- calling an endpoint directly\n", + "# -- using an endpoint in a syft function with worker-pool\n", + "# -- doing some small analysis on the results\n", + "# -- testing and submitting for approval\n", + "# -- users calling" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + 
"name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/bigquery/07-reviewing-user-code.ipynb b/notebooks/scenarios/bigquery/07-reviewing-user-code.ipynb new file mode 100644 index 00000000000..3791163a12c --- /dev/null +++ b/notebooks/scenarios/bigquery/07-reviewing-user-code.ipynb @@ -0,0 +1,46 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "1f2da72c-33a1-4edc-8ab5-dc9b83ce2af8", + "metadata": {}, + "outputs": [], + "source": [ + "# -- requests queue\n", + "# -- reviewing code\n", + "# -- carefully testing code\n", + "# -- approve / deny code" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "446fefdc-50a0-4d44-9ce4-d870432857c9", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/bigquery/08-running-code.ipynb b/notebooks/scenarios/bigquery/08-running-code.ipynb new file mode 100644 index 00000000000..b64a240fe90 --- /dev/null +++ b/notebooks/scenarios/bigquery/08-running-code.ipynb @@ -0,0 +1,40 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "d7ad91a8-7b45-4d3f-8733-568d07a2b3b5", + "metadata": {}, + "outputs": [], + "source": [ + "# -- executing approved code\n", + "# -- working with jobs\n", + "# -- viewing logs\n", + "# -- refreshing\n", + "# -- getting final result\n", + "# -- success" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/01-why-reverse-tunnel.ipynb b/notebooks/scenarios/reverse-tunnel/01-why-reverse-tunnel.ipynb new file mode 100644 index 00000000000..47de024979e --- /dev/null +++ b/notebooks/scenarios/reverse-tunnel/01-why-reverse-tunnel.ipynb @@ -0,0 +1,36 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "9a9dbce4-7d8d-4bad-a758-25d4696f0bb8", + "metadata": {}, + "outputs": [], + "source": [ + "# -- NAT Firewall problem\n", + "# -- Solution: rathole" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/02-creating-gateway.ipynb 
b/notebooks/scenarios/reverse-tunnel/02-creating-gateway.ipynb new file mode 100644 index 00000000000..44003fc1edc --- /dev/null +++ b/notebooks/scenarios/reverse-tunnel/02-creating-gateway.ipynb @@ -0,0 +1,36 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "66b4e68c-73a9-4064-802a-4fd96887b3f6", + "metadata": {}, + "outputs": [], + "source": [ + "# -- helm install\n", + "# -- deploy gateway k8s to azure" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/03-network-configuration.ipynb b/notebooks/scenarios/reverse-tunnel/03-network-configuration.ipynb new file mode 100644 index 00000000000..477148a7c33 --- /dev/null +++ b/notebooks/scenarios/reverse-tunnel/03-network-configuration.ipynb @@ -0,0 +1,37 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "6296126b-2575-44b4-9f09-2d73f444ca96", + "metadata": {}, + "outputs": [], + "source": [ + "# -- ingress\n", + "# -- open port for rathole server\n", + "# -- nodeport vs websockets" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/04-setup-domain-with-tunnel.ipynb b/notebooks/scenarios/reverse-tunnel/04-setup-domain-with-tunnel.ipynb new file mode 100644 index 00000000000..decca53c96d --- /dev/null +++ b/notebooks/scenarios/reverse-tunnel/04-setup-domain-with-tunnel.ipynb @@ -0,0 +1,36 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "a2746b76-cc79-4853-8ef5-98c63b516eab", + "metadata": {}, + "outputs": [], + "source": [ + "# -- deploy local docker k3d\n", + "# -- enable rathole reverse tunnel" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/05-connect-to-gateway-over-tunnel.ipynb b/notebooks/scenarios/reverse-tunnel/05-connect-to-gateway-over-tunnel.ipynb new file mode 100644 index 00000000000..c6391b82185 --- /dev/null +++ b/notebooks/scenarios/reverse-tunnel/05-connect-to-gateway-over-tunnel.ipynb @@ -0,0 +1,36 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "ad6b54cd-2898-4b67-af54-ddc7a13173e2", + "metadata": {}, + "outputs": [], + "source": [ + "# -- run connection request\n", + "# -- approve request" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + 
"name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/06-proxy-clients.ipynb b/notebooks/scenarios/reverse-tunnel/06-proxy-clients.ipynb new file mode 100644 index 00000000000..536dce15404 --- /dev/null +++ b/notebooks/scenarios/reverse-tunnel/06-proxy-clients.ipynb @@ -0,0 +1,36 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "1c3868bb-a140-451a-ac51-a4fd17bf2ab8", + "metadata": {}, + "outputs": [], + "source": [ + "# -- how to list domains on gateway\n", + "# -- getting a proxy client" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/07-blob-storage-streaming.ipynb b/notebooks/scenarios/reverse-tunnel/07-blob-storage-streaming.ipynb new file mode 100644 index 00000000000..b2d388246e9 --- /dev/null +++ b/notebooks/scenarios/reverse-tunnel/07-blob-storage-streaming.ipynb @@ -0,0 +1,35 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "2927dc60-0a36-4a47-8dac-98c8aca71cfe", + "metadata": {}, + "outputs": [], + "source": [ + "# -- checking upload and download to blob storage work" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/08-debugging-tunnel.ipynb b/notebooks/scenarios/reverse-tunnel/08-debugging-tunnel.ipynb new file mode 100644 index 00000000000..edabd3ff979 --- /dev/null +++ b/notebooks/scenarios/reverse-tunnel/08-debugging-tunnel.ipynb @@ -0,0 +1,38 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "c3692642-b407-4471-9467-bcc9ca9bb7a9", + "metadata": {}, + "outputs": [], + "source": [ + "# -- include cleaned up diagram?\n", + "# -- tunnel config file and config maps\n", + "# -- running curl from inside the containers\n", + "# -- the internal host / header on the gateway" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 6032681517ff255e69a90f89f08c70534eface0d Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Mon, 17 Jun 2024 16:52:28 +1000 Subject: [PATCH 157/313] Added enclave notebooks - Tweaked a few other scenarios --- 
.../scenarios/bigquery/09-debugging.ipynb | 45 ++++++++++++++++ .../enclave/01-primary-domain-setup.ipynb | 38 ++++++++++++++ .../enclave/02-manual-enclave-setup.ipynb | 38 ++++++++++++++ .../enclave/03-secondary-domain-setup.ipynb | 37 ++++++++++++++ .../enclave/04-data-scientist-join.ipynb | 51 +++++++++++++++++++ .../scenarios/enclave/05-domains-review.ipynb | 41 +++++++++++++++ .../enclave/06-manual-execution.ipynb | 46 +++++++++++++++++ .../enclave/07-audit-project-logs.ipynb | 36 +++++++++++++ .../enclave/08-enclave-shutdown.ipynb | 35 +++++++++++++ .../01-why-reverse-tunnel.ipynb | 1 + ...ugging-tunnel.ipynb => 08-debugging.ipynb} | 4 +- 11 files changed, 371 insertions(+), 1 deletion(-) create mode 100644 notebooks/scenarios/bigquery/09-debugging.ipynb create mode 100644 notebooks/scenarios/enclave/01-primary-domain-setup.ipynb create mode 100644 notebooks/scenarios/enclave/02-manual-enclave-setup.ipynb create mode 100644 notebooks/scenarios/enclave/03-secondary-domain-setup.ipynb create mode 100644 notebooks/scenarios/enclave/04-data-scientist-join.ipynb create mode 100644 notebooks/scenarios/enclave/05-domains-review.ipynb create mode 100644 notebooks/scenarios/enclave/06-manual-execution.ipynb create mode 100644 notebooks/scenarios/enclave/07-audit-project-logs.ipynb create mode 100644 notebooks/scenarios/enclave/08-enclave-shutdown.ipynb rename notebooks/scenarios/reverse-tunnel/{08-debugging-tunnel.ipynb => 08-debugging.ipynb} (83%) diff --git a/notebooks/scenarios/bigquery/09-debugging.ipynb b/notebooks/scenarios/bigquery/09-debugging.ipynb new file mode 100644 index 00000000000..b1e5b0237e4 --- /dev/null +++ b/notebooks/scenarios/bigquery/09-debugging.ipynb @@ -0,0 +1,45 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "af997f01-452e-4f18-affa-d5e3f7feb87a", + "metadata": {}, + "outputs": [], + "source": [ + "# -- common issues and where to look\n", + "# -- jobs, blob storage, queues, service accounts, seaweedfs, gcs\n", + "# -- custom worker images, registry, pull permissions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "92ee79f2-07b5-4fb8-a2f4-05f006b2fabb", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/enclave/01-primary-domain-setup.ipynb b/notebooks/scenarios/enclave/01-primary-domain-setup.ipynb new file mode 100644 index 00000000000..77086333a0a --- /dev/null +++ b/notebooks/scenarios/enclave/01-primary-domain-setup.ipynb @@ -0,0 +1,38 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "f9f7fba1-43f8-48bc-a45d-46530574d010", + "metadata": {}, + "outputs": [], + "source": [ + "# -- upload model tensor\n", + "# -- create user account\n", + "# -- phase 2 add model hosting\n", + "# -- phase 3 run on kubernetes" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + 
"nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/enclave/02-manual-enclave-setup.ipynb b/notebooks/scenarios/enclave/02-manual-enclave-setup.ipynb new file mode 100644 index 00000000000..0f1b68dddf2 --- /dev/null +++ b/notebooks/scenarios/enclave/02-manual-enclave-setup.ipynb @@ -0,0 +1,38 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "fac336b0-c1a6-46a0-8133-3a2b0704a2b3", + "metadata": {}, + "outputs": [], + "source": [ + "# -- create enclave node\n", + "# -- attach to primary domain\n", + "# -- phase 2 launch python enclave dynamically instead\n", + "# -- phase 3 run on cloud enclave with k3d (dynamically after)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/enclave/03-secondary-domain-setup.ipynb b/notebooks/scenarios/enclave/03-secondary-domain-setup.ipynb new file mode 100644 index 00000000000..bc8251c65a4 --- /dev/null +++ b/notebooks/scenarios/enclave/03-secondary-domain-setup.ipynb @@ -0,0 +1,37 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "0cb79e18-a7f7-4096-b20f-31aef7b049c3", + "metadata": {}, + "outputs": [], + "source": [ + "# -- upload inference tensor\n", + "# -- phase 2 inference eval dataset\n", + "# -- create user account" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/enclave/04-data-scientist-join.ipynb b/notebooks/scenarios/enclave/04-data-scientist-join.ipynb new file mode 100644 index 00000000000..d49d09ccbe0 --- /dev/null +++ b/notebooks/scenarios/enclave/04-data-scientist-join.ipynb @@ -0,0 +1,51 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "52c96d72-c333-4b5b-8631-caaf3c48e4d0", + "metadata": {}, + "outputs": [], + "source": [ + "# -- connect to domains\n", + "# -- associate domains?\n", + "# -- list enclaves\n", + "# -- find datasets\n", + "# -- execution policies\n", + "# -- phase 2 - add a hf model and custom worker image to execution policy\n", + "# -- phase 3 eager data scientist inference inputs in InputPolicy\n", + "# -- create usercode sum(a, b)\n", + "# -- submit project" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0ebf6dc1-6b71-4c6b-826b-c35018a041e7", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + 
"version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/enclave/05-domains-review.ipynb b/notebooks/scenarios/enclave/05-domains-review.ipynb new file mode 100644 index 00000000000..0220db2d7d0 --- /dev/null +++ b/notebooks/scenarios/enclave/05-domains-review.ipynb @@ -0,0 +1,41 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "6fe704db-90b5-4511-8b29-0056eb82e967", + "metadata": {}, + "outputs": [], + "source": [ + "# -- review project\n", + "# -- inspect code\n", + "# -- step through execution policy\n", + "# -- query enclave attestation\n", + "# -- approve execution\n", + "# -- phase 2 - once approved everywhere, setup custom image on enclave\n", + "# -- phase 3 - once approved deploy with terraform etc" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/enclave/06-manual-execution.ipynb b/notebooks/scenarios/enclave/06-manual-execution.ipynb new file mode 100644 index 00000000000..6d5f77a8f01 --- /dev/null +++ b/notebooks/scenarios/enclave/06-manual-execution.ipynb @@ -0,0 +1,46 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "6a7cf74a-a267-4e4d-a167-aa02364ca860", + "metadata": {}, + "outputs": [], + "source": [ + "# -- get project\n", + "# -- check project status\n", + "# -- run code\n", + "# -- get result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "611f94c5-a6fd-4cb6-a581-d878bc11bcdc", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/enclave/07-audit-project-logs.ipynb b/notebooks/scenarios/enclave/07-audit-project-logs.ipynb new file mode 100644 index 00000000000..3af993e7572 --- /dev/null +++ b/notebooks/scenarios/enclave/07-audit-project-logs.ipynb @@ -0,0 +1,36 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "c36fced0-3a9d-439f-b237-64a71a1ee3ac", + "metadata": {}, + "outputs": [], + "source": [ + "# -- domain owners view logs from enclave on domain\n", + "# -- step through execution policy at each step who did what" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/enclave/08-enclave-shutdown.ipynb b/notebooks/scenarios/enclave/08-enclave-shutdown.ipynb new file mode 100644 index 00000000000..2f0e245e8fd --- /dev/null 
+++ b/notebooks/scenarios/enclave/08-enclave-shutdown.ipynb @@ -0,0 +1,35 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "a3cfb45c-9dbd-485d-a71a-e024c9889715", + "metadata": {}, + "outputs": [], + "source": [ + "# -- primary terminates enclave" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/scenarios/reverse-tunnel/01-why-reverse-tunnel.ipynb b/notebooks/scenarios/reverse-tunnel/01-why-reverse-tunnel.ipynb index 47de024979e..7161744c040 100644 --- a/notebooks/scenarios/reverse-tunnel/01-why-reverse-tunnel.ipynb +++ b/notebooks/scenarios/reverse-tunnel/01-why-reverse-tunnel.ipynb @@ -7,6 +7,7 @@ "metadata": {}, "outputs": [], "source": [ + "# -- include cleaned up diagram?\n", "# -- NAT Firewall problem\n", "# -- Solution: rathole" ] diff --git a/notebooks/scenarios/reverse-tunnel/08-debugging-tunnel.ipynb b/notebooks/scenarios/reverse-tunnel/08-debugging.ipynb similarity index 83% rename from notebooks/scenarios/reverse-tunnel/08-debugging-tunnel.ipynb rename to notebooks/scenarios/reverse-tunnel/08-debugging.ipynb index edabd3ff979..24d912d0938 100644 --- a/notebooks/scenarios/reverse-tunnel/08-debugging-tunnel.ipynb +++ b/notebooks/scenarios/reverse-tunnel/08-debugging.ipynb @@ -9,8 +9,10 @@ "source": [ "# -- include cleaned up diagram?\n", "# -- tunnel config file and config maps\n", + "# -- determining ip addresses and testing ports are available\n", "# -- running curl from inside the containers\n", - "# -- the internal host / header on the gateway" + "# -- the internal host / header on the gateway\n", + "# -- checking logs" ] } ], From f86080aba4bdf3c0b9228ba108307a1a586b2145 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Mon, 17 Jun 2024 10:07:11 +0300 Subject: [PATCH 158/313] add several server-side methods --- .../syft/src/syft/service/request/request.py | 29 ++++++++++++++++--- .../syft/service/request/request_service.py | 2 +- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 6bf53f28669..88f9e0df271 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -547,6 +547,12 @@ def codes(self) -> Any: message="This type of request does not have code associated with it." 
) + def get_user_code(self, context: AuthedServiceContext) -> UserCode | None: + for change in self.changes: + if isinstance(change, UserCodeStatusChange): + return change.get_user_code(context) + return None + @property def code(self) -> UserCode | SyftError: for change in self.changes: @@ -572,10 +578,10 @@ def current_change_state(self) -> dict[UID, bool]: def icon(self) -> str: return Icon.REQUEST.svg - @property - def status(self) -> RequestStatus: - if self.is_low_side: - code_status = self.code.status + def get_status(self, context: AuthedServiceContext | None = None) -> RequestStatus: + is_low_side = self.get_is_low_side(context) if context else self.is_low_side + if is_low_side: + code_status = self.code.get_status(context) if context else self.code.status return RequestStatus.from_usercode_status(code_status) if len(self.history) == 0: @@ -591,6 +597,10 @@ def status(self) -> RequestStatus: return request_status + @property + def status(self) -> RequestStatus: + return self.get_status() + def approve( self, disable_warnings: bool = False, @@ -664,6 +674,13 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: def is_low_side(self) -> bool: return bool(self.code) and self.code.is_low_side + def get_is_low_side(self, context: AuthedServiceContext) -> bool: + code = self.get_user_code(context) + if code: + return code.is_low_side + else: + return False + def approve_with_client(self, client: SyftClient) -> Result[SyftSuccess, SyftError]: if self.is_low_side: return SyftError( @@ -1144,6 +1161,10 @@ class UserCodeStatusChange(Change): def code(self) -> UserCode: return self.linked_user_code.resolve + def get_user_code(self, context: AuthedServiceContext) -> UserCode: + resolve = self.linked_user_code.resolve_with_context(context) + return resolve.ok() + @property def codes(self) -> list[UserCode]: def recursive_code(node: Any) -> list: diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index ac166f0a32a..b48f75f5149 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -211,7 +211,7 @@ def apply( request_notification = filter_by_obj(context=context, obj_uid=uid) link = LinkedObject.with_context(request, context=context) - if not request.status == RequestStatus.PENDING: + if not request.get_status(context) == RequestStatus.PENDING: if request_notification is not None and not isinstance( request_notification, SyftError ): From 2cb13378d4cd89cf2b70450e8d8a99fda39801d6 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Mon, 17 Jun 2024 10:41:48 +0300 Subject: [PATCH 159/313] fix twin api diff --- packages/syft/src/syft/service/api/api.py | 8 ++++---- tests/integration/local/twin_api_sync_test.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/syft/src/syft/service/api/api.py b/packages/syft/src/syft/service/api/api.py index 6977f29e8c7..a5d9db8f98d 100644 --- a/packages/syft/src/syft/service/api/api.py +++ b/packages/syft/src/syft/service/api/api.py @@ -365,6 +365,10 @@ class TwinAPIEndpoint(SyncableSyftObject): # version __canonical_name__: str = "TwinAPIEndpoint" __version__ = SYFT_OBJECT_VERSION_1 + __exclude_sync_diff_attrs__ = ["private_function"] + __private_sync_attr_mocks__ = { + "private_function": None, + } def __init__(self, **kwargs: Any) -> None: super().__init__(**kwargs) @@ -378,10 +382,6 @@ def __init__(self, **kwargs: Any) -> None: worker_pool: str | None = None 
endpoint_timeout: int = 60 - __private_sync_attr_mocks__ = { - "private_function": None, - } - __attr_searchable__ = ["path"] __attr_unique__ = ["path"] __repr_attrs__ = [ diff --git a/tests/integration/local/twin_api_sync_test.py b/tests/integration/local/twin_api_sync_test.py index e09c82001d1..fed1905370f 100644 --- a/tests/integration/local/twin_api_sync_test.py +++ b/tests/integration/local/twin_api_sync_test.py @@ -100,7 +100,7 @@ def compute(query): ) job_high = high_client.code.compute(query=high_client.api.services.testapi.query) - high_client.requests[0].accept_by_depositing_result(job_high) + high_client.requests[0].deposit_result(job_high) diff_before, diff_after = compare_and_resolve( from_client=high_client, to_client=low_client ) From 929d58360453b9ec022038a799dde979aac3cff7 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Mon, 17 Jun 2024 10:42:49 +0300 Subject: [PATCH 160/313] remove stale test after removing sync_code_from_request --- .../local/request_multiple_nodes_test.py | 93 ------------------- 1 file changed, 93 deletions(-) diff --git a/tests/integration/local/request_multiple_nodes_test.py b/tests/integration/local/request_multiple_nodes_test.py index e81f75b57d6..4b0a2950c66 100644 --- a/tests/integration/local/request_multiple_nodes_test.py +++ b/tests/integration/local/request_multiple_nodes_test.py @@ -7,8 +7,6 @@ # syft absolute import syft as sy -from syft.service.job.job_stash import Job -from syft.service.job.job_stash import JobStatus @pytest.fixture(scope="function") @@ -107,94 +105,3 @@ def dataset_2(client_do_2): client_do_2.upload_dataset(dataset) return client_do_2.datasets[0].assets[0] - - -@pytest.mark.flaky(reruns=3, reruns_delay=3) -@pytest.mark.local_node -def test_transfer_request_blocking( - client_ds_1, client_do_1, client_do_2, dataset_1, dataset_2 -): - @sy.syft_function_single_use(data=dataset_1) - def compute_sum(data) -> float: - return data.mean() - - client_ds_1.code.request_code_execution(compute_sum) - - # Submit + execute on second node - request_1_do = client_do_1.requests[0] - client_do_2.sync_code_from_request(request_1_do) - - # DO executes + syncs - client_do_2._fetch_api(client_do_2.credentials) - result_2 = client_do_2.code.compute_sum(data=dataset_2).get() - assert result_2 == dataset_2.data.mean() - res = request_1_do.accept_by_depositing_result(result_2) - assert isinstance(res, sy.SyftSuccess) - - # DS gets result blocking + nonblocking - result_ds_blocking = client_ds_1.code.compute_sum( - data=dataset_1, blocking=True - ).get() - - job_1_ds = client_ds_1.code.compute_sum(data=dataset_1, blocking=False) - assert isinstance(job_1_ds, Job) - assert job_1_ds == client_ds_1.code.compute_sum.jobs[-1] - assert job_1_ds.status == JobStatus.COMPLETED - - result_ds_nonblocking = job_1_ds.wait().get() - - assert result_ds_blocking == result_ds_nonblocking == dataset_2.data.mean() - - -@pytest.mark.flaky(reruns=3, reruns_delay=3) -@pytest.mark.local_node -def test_transfer_request_nonblocking( - client_ds_1, client_do_1, client_do_2, dataset_1, dataset_2 -): - @sy.syft_function_single_use(data=dataset_1) - def compute_mean(data) -> float: - return data.mean() - - client_ds_1.code.request_code_execution(compute_mean) - - # Submit + execute on second node - request_1_do = client_do_1.requests[0] - client_do_2.sync_code_from_request(request_1_do) - - client_do_2._fetch_api(client_do_2.credentials) - job_2 = client_do_2.code.compute_mean(data=dataset_2, blocking=False) - assert isinstance(job_2, Job) - - # Transfer back Job 
Info - job_2_info = job_2.info() - assert job_2_info.result is None - assert job_2_info.status is not None - res = request_1_do.sync_job(job_2_info) - assert isinstance(res, sy.SyftSuccess) - - # DS checks job info - job_1_ds = client_ds_1.code.compute_mean.jobs[-1] - assert job_1_ds.status == job_2.status - - # DO finishes + syncs job result - result = job_2.wait().get() - assert result == dataset_2.data.mean() - assert job_2.status == JobStatus.COMPLETED - - job_2_info_with_result = job_2.info(result=True) - res = request_1_do.accept_by_depositing_result(job_2_info_with_result) - assert isinstance(res, sy.SyftSuccess) - - # DS gets result blocking + nonblocking - result_ds_blocking = client_ds_1.code.compute_mean( - data=dataset_1, blocking=True - ).get() - - job_1_ds = client_ds_1.code.compute_mean(data=dataset_1, blocking=False) - assert isinstance(job_1_ds, Job) - assert job_1_ds == client_ds_1.code.compute_mean.jobs[-1] - assert job_1_ds.status == JobStatus.COMPLETED - - result_ds_nonblocking = job_1_ds.wait().get() - - assert result_ds_blocking == result_ds_nonblocking == dataset_2.data.mean() From 18e0fad3540287dca364defd7ad6b5f6f3d679cc Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Mon, 17 Jun 2024 10:08:58 +0200 Subject: [PATCH 161/313] fix repr --- packages/syft/src/syft/service/sync/sync_state.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/syft/src/syft/service/sync/sync_state.py b/packages/syft/src/syft/service/sync/sync_state.py index 85b876b485e..812f96fac19 100644 --- a/packages/syft/src/syft/service/sync/sync_state.py +++ b/packages/syft/src/syft/service/sync/sync_state.py @@ -242,9 +242,17 @@ def rows(self) -> list[SyncStateRow]: if previous_diff is None: raise ValueError("No previous state to compare to") for batch in previous_diff.batches: + # NOTE we re-use NodeDiff to compare to previous state, + # low_obj is previous state, high_obj is current state diff = batch.root_diff + + # If there is no high object, it means it was deleted + # and we don't need to show it in the table + if diff.high_obj is None: + continue if diff.object_id in ids: continue + ids.add(diff.object_id) row = SyncStateRow( object=diff.high_obj, From 066de7d58a91df22e0402e4bec4e05aa12a6a56e Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Mon, 17 Jun 2024 10:44:36 +0200 Subject: [PATCH 162/313] remove outdated test --- .../service/sync/sync_resolve_single_test.py | 33 ------------------- 1 file changed, 33 deletions(-) diff --git a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py index adc6346fd10..868f9f5203d 100644 --- a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py +++ b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py @@ -8,7 +8,6 @@ from syft.client.sync_decision import SyncDecision from syft.client.syncing import compare_clients from syft.client.syncing import resolve -from syft.service.code.user_code import UserCode from syft.service.job.job_stash import Job from syft.service.request.request import RequestStatus from syft.service.response import SyftError @@ -181,38 +180,6 @@ def compute() -> int: assert len(diff.all_batches) == 2 -def test_forget_usercode(low_worker, high_worker): - low_client = low_worker.root_client - client_low_ds = low_worker.guest_client - high_client = high_worker.root_client - - @sy.syft_function_single_use() - def compute() -> int: - print("computing...") - return 42 - - _ = client_low_ds.code.request_code_execution(compute) - 
- diff_before, diff_after = compare_and_resolve( - from_client=low_client, to_client=high_client - ) - - run_and_deposit_result(high_client) - - def skip_if_user_code(diff): - if diff.root_type is UserCode: - return SyncDecision.IGNORE - return SyncDecision.SKIP - - diff_before, diff_after = compare_and_resolve( - from_client=low_client, - to_client=high_client, - decision_callback=skip_if_user_code, - ) - assert not diff_before.is_same - assert len(diff_after.batches) == 1 - - def test_request_code_execution_multiple(low_worker, high_worker): low_client = low_worker.root_client client_low_ds = low_worker.guest_client From c3a8aa9f7af9422a181bc462e571c31a3740fb97 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Mon, 17 Jun 2024 11:22:40 +0200 Subject: [PATCH 163/313] fix typing --- packages/syft/src/syft/client/syncing.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index 103d1434126..903bcc6e515 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -1,7 +1,7 @@ # stdlib # stdlib -from collections.abc import Sequence +from collections.abc import Collection # relative from ..abstract_node import NodeSideType @@ -27,8 +27,8 @@ def compare_states( include_ignored: bool = False, include_same: bool = False, filter_by_email: str | None = None, - include_types: Sequence[str | type] | None = None, - exclude_types: Sequence[str | type] | None = None, + include_types: Collection[str | type] | None = None, + exclude_types: Collection[str | type] | None = None, _hide_usercode: bool = True, ) -> NodeDiff | SyftError: # NodeDiff @@ -73,8 +73,8 @@ def compare_clients( include_ignored: bool = False, include_same: bool = False, filter_by_email: str | None = None, - include_types: Sequence[str | type] | None = None, - exclude_types: Sequence[str | type] | None = None, + include_types: Collection[str | type] | None = None, + exclude_types: Collection[str | type] | None = None, _hide_usercode: bool = True, ) -> NodeDiff | SyftError: from_state = from_client.get_sync_state() From 13136334a757368b6139f190ebd3c44dd57e12bd Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Mon, 17 Jun 2024 11:58:02 +0200 Subject: [PATCH 164/313] enclaves fixes --- .../src/syft/service/action/action_service.py | 9 +++++--- .../syft/service/enclave/enclave_service.py | 22 +++++++++++++++++-- .../syft/src/syft/service/policy/policy.py | 2 +- 3 files changed, 27 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 08e8119ee22..69ca9ed8175 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -123,6 +123,7 @@ def _set( has_result_read_permission: bool = False, add_storage_permission: bool = True, ignore_detached_objs: bool = False, + skip_clear_cache: bool = False ) -> Result[ActionObject, str]: if self.is_detached_obj(action_object, ignore_detached_objs): return Err( @@ -133,12 +134,14 @@ def _set( if isinstance(action_object, ActionObject): action_object.syft_created_at = DateTime.now() - action_object._clear_cache() + if not skip_clear_cache: + action_object._clear_cache() else: action_object.private_obj.syft_created_at = DateTime.now() # type: ignore[unreachable] action_object.mock_obj.syft_created_at = DateTime.now() - action_object.private_obj._clear_cache() - 
action_object.mock_obj._clear_cache()
+            if not skip_clear_cache:
+                action_object.private_obj._clear_cache()
+                action_object.mock_obj._clear_cache()
 
         # If either context or argument is True, has_result_read_permission is True
         has_result_read_permission = (
diff --git a/packages/syft/src/syft/service/enclave/enclave_service.py b/packages/syft/src/syft/service/enclave/enclave_service.py
index 03edc7af6a7..46afbb6af8c 100644
--- a/packages/syft/src/syft/service/enclave/enclave_service.py
+++ b/packages/syft/src/syft/service/enclave/enclave_service.py
@@ -78,14 +78,32 @@ def send_user_code_inputs_to_enclave(
         root_context.extra_kwargs = {"has_result_read_permission": True}
 
         # TODO: Instead of using the action store, modify to
         # use the action service directly to store objects
-        action_service._set(root_context, dict_object)
+        # TODO: we store this in the action store instead of blob storage,
+        # which is bad, but we cannot update in the blob storage
+        res = action_service._set(
+            root_context,
+            dict_object,
+            ignore_detached_objs=True,
+            skip_clear_cache=True,
+        )
+        if res.is_err():
+            return SyftError(message=res.value)
     else:
         res = action_service.get(uid=user_code_id, context=root_context)
         if res.is_ok():
             dict_object = res.ok()
             dict_object[str(context.credentials)] = inputs
-            action_service._set(root_context, dict_object)
+            # TODO: we store this in the action store instead of blob storage,
+            # which is bad, but we cannot update in the blob storage
+            res = action_service._set(
+                root_context,
+                dict_object,
+                ignore_detached_objs=True,
+                skip_clear_cache=True,
+            )
+            if res.is_err():
+                return SyftError(message=res.value)
         else:
             return SyftError(
                 message=f"Error while fetching the object on Enclave: {res.err()}"
diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py
index 736bf85407c..0359b0db0a2 100644
--- a/packages/syft/src/syft/service/policy/policy.py
+++ b/packages/syft/src/syft/service/policy/policy.py
@@ -361,7 +361,7 @@ def _is_valid(
     not_approved_kwargs = set(expected_input_kwargs) - set(permitted_input_kwargs)
     if len(not_approved_kwargs) > 0:
         return Err(
-            f"Input arguments: {not_approved_kwargs} to the function are not approved yet."
+            f"Function arguments: {not_approved_kwargs} are not approved yet."
) return Ok(True) From a4c9172bae0cb40af596d833381126c01c865a4c Mon Sep 17 00:00:00 2001 From: teo Date: Mon, 17 Jun 2024 14:33:26 +0300 Subject: [PATCH 165/313] fix unit test --- packages/syft/src/syft/service/action/action_service.py | 2 +- packages/syft/tests/syft/eager_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 69ca9ed8175..0dcb2271a6c 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -123,7 +123,7 @@ def _set( has_result_read_permission: bool = False, add_storage_permission: bool = True, ignore_detached_objs: bool = False, - skip_clear_cache: bool = False + skip_clear_cache: bool = False, ) -> Result[ActionObject, str]: if self.is_detached_obj(action_object, ignore_detached_objs): return Err( diff --git a/packages/syft/tests/syft/eager_test.py b/packages/syft/tests/syft/eager_test.py index 1cbfd11e55f..7640e295cdc 100644 --- a/packages/syft/tests/syft/eager_test.py +++ b/packages/syft/tests/syft/eager_test.py @@ -96,7 +96,7 @@ def my_plan(x=np.array([[2, 2, 2], [2, 2, 2]])): # noqa: B008 mock_obj=np.array([[1, 1, 1], [1, 1, 1]]), ) - input_obj = root_domain_client.api.services.action.set(input_obj) + input_obj = input_obj.send(root_domain_client) pointer = guest_client.api.services.action.get_pointer(input_obj.id) res_ptr = plan_ptr(x=pointer) From 6d81b27bf029f5dc70046599f683f3c4d52be6f3 Mon Sep 17 00:00:00 2001 From: teo Date: Mon, 17 Jun 2024 14:47:39 +0300 Subject: [PATCH 166/313] fix k8s notebook --- notebooks/api/0.8/11-container-images-k8s.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/notebooks/api/0.8/11-container-images-k8s.ipynb b/notebooks/api/0.8/11-container-images-k8s.ipynb index c685e6f8d7d..27340a8d9e5 100644 --- a/notebooks/api/0.8/11-container-images-k8s.ipynb +++ b/notebooks/api/0.8/11-container-images-k8s.ipynb @@ -795,7 +795,7 @@ "data = np.array([1, 2, 3])\n", "data_action_obj = sy.ActionObject.from_obj(data)\n", "\n", - "data_pointer = domain_client.api.services.action.set(data_action_obj)\n", + "data_pointer = data_action_obj.send(domain_client)\n", "data_pointer" ] }, @@ -1500,7 +1500,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.11.8" } }, "nbformat": 4, From e58289ccfc73393bbf84cef23fb9755f4b23e0a8 Mon Sep 17 00:00:00 2001 From: teo Date: Mon, 17 Jun 2024 15:10:56 +0300 Subject: [PATCH 167/313] fix k8s unit test --- tests/integration/container_workload/pool_image_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index a7fcd7d691a..a3a53aa2385 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -274,7 +274,7 @@ def test_pool_image_creation_job_requests( # Dataset data = np.array([1, 2, 3]) data_action_obj = sy.ActionObject.from_obj(data) - data_pointer = ds_client.api.services.action.set(data_action_obj) + data_pointer = data_action_obj.send(ds_client) # Function @sy.syft_function( From 05a6da5d74ca7568b5379987dee85d0b2b56cdc8 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Mon, 17 Jun 2024 14:31:41 +0200 Subject: [PATCH 168/313] fix share twinAPI message --- 
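A note on the one-word fix below, since the behaviour is easy to get backwards: a TwinAPI endpoint carries two implementations, a public (mock) one and a private one, and syncing ships only the public half to the public/low-side node; the private half never leaves the high side. As a rough sketch of what such an endpoint looks like, assuming the TwinAPI helpers available in this tree (`sy.api_endpoint_method`, `sy.TwinAPIEndpoint`, `custom_api.add`); the path, function bodies, and `high_client` are illustrative and not taken from this patch:

```python
# Sketch under the assumptions stated above, not part of this patch.
import syft as sy

@sy.api_endpoint_method()
def public_fn(context, query: str) -> str:
    # Public (mock) half: the only part that is synced to the public node.
    return f"mock answer for {query!r}"

@sy.api_endpoint_method()
def private_fn(context, query: str) -> str:
    # Private half: stays on the high-side node and is never synced.
    return f"real answer for {query!r}"

endpoint = sy.TwinAPIEndpoint(
    path="demo.query",  # hypothetical endpoint path
    mock_function=public_fn,
    private_function=private_fn,
    description="Twin endpoint whose public half is shared on sync",
)
high_client.custom_api.add(endpoint=endpoint)  # high_client: assumed logged-in admin client
```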
packages/syft/src/syft/service/sync/resolve_widget.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/syft/src/syft/service/sync/resolve_widget.py b/packages/syft/src/syft/service/sync/resolve_widget.py
index 261ed28e075..0439228cd27 100644
--- a/packages/syft/src/syft/service/sync/resolve_widget.py
+++ b/packages/syft/src/syft/service/sync/resolve_widget.py
@@ -139,7 +139,7 @@ def share_private_data(self) -> bool:
     @property
     def warning_html(self) -> str:
         if isinstance(self.diff.non_empty_object, TwinAPIEndpoint):
-            message = "Only the private function of a TwinAPI will be synced to the public node."
+            message = "Only the public function of a TwinAPI will be synced to the public node."
             return Alert(message=message).to_html()
         elif self.show_share_warning:
             message = (

From d4355475385abe17eae1884ed2d983abe6d16fe2 Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Mon, 17 Jun 2024 15:36:33 +0200
Subject: [PATCH 169/313] add warning message

---
 packages/syft/src/syft/client/syncing.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py
index 903bcc6e515..342df11b6d8 100644
--- a/packages/syft/src/syft/client/syncing.py
+++ b/packages/syft/src/syft/client/syncing.py
@@ -16,6 +16,7 @@
 from ..service.sync.sync_state import SyncState
 from ..types.uid import UID
 from ..util.decorators import deprecated
+from ..util.util import prompt_warning_message
 from .domain_client import DomainClient
 from .sync_decision import SyncDecision
 from .sync_decision import SyncDirection
@@ -29,7 +30,7 @@ def compare_states(
     include_ignored: bool = False,
     include_same: bool = False,
     filter_by_email: str | None = None,
     include_types: Collection[str | type] | None = None,
     exclude_types: Collection[str | type] | None = None,
-    _hide_usercode: bool = True,
+    hide_usercode: bool = True,
 ) -> NodeDiff | SyftError:
     # NodeDiff
     if (
@@ -51,7 +52,11 @@ def compare_states(
         "Invalid node side types: can only compare a high and low node"
     )

-    if _hide_usercode:
+    if hide_usercode:
+        prompt_warning_message(
+            "User code is hidden by default, as it is also part of the Request."
+            " If you want to include it, set hide_usercode=False."
+ ) exclude_types = exclude_types or [] exclude_types.append("usercode") @@ -75,7 +80,7 @@ def compare_clients( filter_by_email: str | None = None, include_types: Collection[str | type] | None = None, exclude_types: Collection[str | type] | None = None, - _hide_usercode: bool = True, + hide_usercode: bool = True, ) -> NodeDiff | SyftError: from_state = from_client.get_sync_state() if isinstance(from_state, SyftError): @@ -93,7 +98,7 @@ def compare_clients( filter_by_email=filter_by_email, include_types=include_types, exclude_types=exclude_types, - _hide_usercode=_hide_usercode, + hide_usercode=hide_usercode, ) From 5ab8297f868991bfad9be5e116f0c586b3576a3a Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Mon, 17 Jun 2024 23:45:12 +1000 Subject: [PATCH 170/313] Removed broken helm builds --- packages/grid/helm/repo/index.yaml | 44 ------------------ .../grid/helm/repo/syft-0.8.2-beta.53.tgz | Bin 14856 -> 0 bytes packages/grid/helm/repo/syft-0.8.3-beta.3.tgz | Bin 8990 -> 0 bytes .../grid/helm/repo/syft-0.8.4-beta.17.tgz | Bin 7523 -> 0 bytes 4 files changed, 44 deletions(-) delete mode 100644 packages/grid/helm/repo/syft-0.8.2-beta.53.tgz delete mode 100644 packages/grid/helm/repo/syft-0.8.3-beta.3.tgz delete mode 100644 packages/grid/helm/repo/syft-0.8.4-beta.17.tgz diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 4c88b674ff0..ed6925dafca 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -505,18 +505,6 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.18.tgz version: 0.8.4-beta.18 - - apiVersion: v2 - appVersion: 0.8.4-beta.17 - created: "2024-06-03T13:45:21.35354575Z" - description: Perform numpy-like analysis on data that remains in someone elses - server - digest: 71b39c5a4c64037eadbb154f7029282ba90d9a0d703f8d4c7dfc1ba2f5d81498 - icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png - name: syft - type: application - urls: - - https://openmined.github.io/PySyft/helm/syft-0.8.4-beta.17.tgz - version: 0.8.4-beta.17 - apiVersion: v2 appVersion: 0.8.4-beta.16 created: "2024-06-03T13:45:21.353142151Z" @@ -781,22 +769,6 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.3-beta.4.tgz version: 0.8.3-beta.4 - - apiVersion: v2 - appVersion: 0.8.3-beta.3 - created: "2024-06-03T13:45:21.346709046Z" - dependencies: - - name: component-chart - repository: https://charts.devspace.sh - version: 0.9.1 - description: Perform numpy-like analysis on data that remains in someone elses - server - digest: 9162bc14e40021b56111c3c9dbeba2596ce1ff469263b0a1e0a4679174c14ef7 - icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png - name: syft - type: application - urls: - - https://openmined.github.io/PySyft/helm/syft-0.8.3-beta.3.tgz - version: 0.8.3-beta.3 - apiVersion: v2 appVersion: 0.8.3-beta.2 created: "2024-06-03T13:45:21.346158131Z" @@ -925,22 +897,6 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.2-beta.56.tgz version: 0.8.2-beta.56 - - apiVersion: v2 - appVersion: 0.8.2-beta.53 - created: "2024-06-03T13:45:21.340191661Z" - dependencies: - - name: component-chart - repository: https://charts.devspace.sh - version: 0.9.1 - description: Perform numpy-like analysis on data that remains in someone elses - server - digest: ab91512010a81ca45ff65030c366c6d16873913199a5346013cd24ee6348df84 - icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png - name: syft - 
type: application
-   urls:
-   - https://openmined.github.io/PySyft/helm/syft-0.8.2-beta.53.tgz
-   version: 0.8.2-beta.53
   - apiVersion: v2
     appVersion: 0.8.2-beta.52
     created: "2024-06-03T13:45:21.339543493Z"
diff --git a/packages/grid/helm/repo/syft-0.8.2-beta.53.tgz b/packages/grid/helm/repo/syft-0.8.2-beta.53.tgz
deleted file mode 100644
index a4baf184d7abdeeaaa29ec1019872cb33fdb7ef4..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

[base85-encoded binary delta omitted: literal 14856, the deleted syft-0.8.2-beta.53.tgz archive]

diff --git a/packages/grid/helm/repo/syft-0.8.3-beta.3.tgz b/packages/grid/helm/repo/syft-0.8.3-beta.3.tgz
deleted file mode 100644
index b7a2322c4908be646efccb7044b321ab410b9e95..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

[base85-encoded binary delta omitted: literal 8990, the deleted syft-0.8.3-beta.3.tgz archive]

[base85-encoded binary delta omitted: literal 7523, the deleted syft-0.8.4-beta.17.tgz archive; the trailing lines of this delta continue below]
z9>rnYzw3?i_7CDH%@P$v70REpoXT}E=j&KE-=kbSq!~n6c|+AfsDcG$z~m23S7KOL z7B}}OE~ozH(jWhI_@36vh6TrzF_5Mwv7I003sCmQB*@?_nz>8v&y(lm<-f*|&Py&d zz2U2a2*xMDhEEGC{(nCyE?u-O3!^Z;8^TLtzrJ}y{1@fb^S}IGJOBSS%4+}r;cNxG zV)y@ay{>><>HnYAn=3^FH0DCb1hgai21a;k={2 z_0j?=1#f4;;WSF(U#17@g`m3EbH_ss$wVg2C5Q~NY&3-$scbN3F6sH^rb^mEqpO;( z>p)F6?py7ky@lW9{J^x{1m;b+&@`teh7gz0Tz5S!;X#cxa{^#7; z@&8uJ)9$-y)_slbbzeMjHd6l@Lt{!ujlW~zD{<|~C*tyUCg6W5$vo3Pqs~9j-@-V) z9*n#vp1oBNhO{hBiS-x>duCtfFN z)LA)uLls1=60Wgke!ukgwJR?ywff(-&wp6e|I1yrJOAHG`9qxlEGl%5JO7z&^RFiN zXWFIh9Q;g5h5u(aT}@$an}0)l<22dRL9_?a4bmzdlpjt^Lx2&Y82$CHWfNu)qh?BHQ9XcKr8n&A8t%|v zew)8N?-QDYBGgi& zQFzxx|EK84?#LAzArdcm)791Mf(PigOGjI;?AhmwxL&-kcxBA>BG}WSt{3Z;ao5Yuu8q8| zLD5vX!unw3bqTeLyZ$4WHS&LBKEQ|MKXI47|HhqN{P$MMPX9|^dTWrB@-e={5^Mnt znMXq@eGHlsYypikhlXKH9)V^8wiT7N>d)MH8(vXaE&n%{1Mqwg~f|8Y^x|KJSo&VRR3o_hY1uaDjJKaV<)su`-!coud4?cLP}QTH%UIE9)tQ1fNd zl?~OvKJnP=zNeEjuSN3a4!ls^H&e?>E4sY?w5-x{(+pIUTlWj6WoufURLBNFdS7L( zsaLjTROEk6`2Wj8Kp&9*j>}m!{>R_p?lk+P z&zv?D#>&1^p^6M7RnI|kGv{QPmKGW(VFxvfruJqZBe}P)Yp$bxX(JPq%{d!)YI|?6 zm3yR;(%EOllX#ph)U6xEWSm>}&D=^WFT?%YKllFmZF3F+qAa;PkAo8t=x!K~(+_E3yp`_@fH3aQOkKEvu!D|HzdI}6)hvQr&~vu1wF#tY2X$sQ zRgZ$GUk@Oi66rj|eFGiT%wIGoFiCt9%+G}%gma5H|5d8Y^Ldtgb0YZ>G2V4~e0B1_ zvi~o9#kDoMFWwt;T;j)ZOX-TPbgH zeJz6|N>LW0at)x{0Ys?an;`0=kuuk+4{5vc23-z<6s6-)5f(K`ZpwG-@*F#e`fpJJ zp~`|AC_dsa_gqD`@dicEFB~oCZxc1aqT3)Fpnv Date: Mon, 17 Jun 2024 18:03:23 +0200 Subject: [PATCH 171/313] cleanup --- packages/syft/src/syft/service/code/user_code.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index b2612704b0f..f09c41e0768 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -124,12 +124,6 @@ class UserCodeStatusCollection(SyncableSyftObject): __version__ = SYFT_OBJECT_VERSION_1 __repr_attrs__ = ["approved", "status_dict"] - - # if len(output_history): {uid: approved}, - # if denied string is somewhere: {uid: denied} - # else: {uid: pending} - # - the object is completely different for l2/l0 - # - the interface is different (because we need context in backend to get output_history) status_dict: dict[NodeIdentity, tuple[UserCodeStatus, str]] = {} user_code_link: LinkedObject From 8dacdf1a6d66cd2d30dd5fc56fe0bd7e45604020 Mon Sep 17 00:00:00 2001 From: IonesioJunior Date: Mon, 17 Jun 2024 15:27:56 -0300 Subject: [PATCH 172/313] Allow user to set headers for domain_client --- packages/syft/src/syft/client/client.py | 43 ++++++++++++++++++++++--- 1 file changed, 39 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index f0d9c55b340..59c36487e84 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -48,6 +48,7 @@ from ..service.user.user_service import UserService from ..types.grid_url import GridURL from ..types.syft_object import SYFT_OBJECT_VERSION_2 +from ..types.syft_object import SYFT_OBJECT_VERSION_3 from ..types.uid import UID from ..util.logger import debug from ..util.telemetry import instrument @@ -130,7 +131,7 @@ class Routes(Enum): @serializable(attrs=["proxy_target_uid", "url"]) -class HTTPConnection(NodeConnection): +class HTTPConnectionV2(NodeConnection): __canonical_name__ = "HTTPConnection" __version__ = SYFT_OBJECT_VERSION_2 @@ -139,6 +140,18 @@ class HTTPConnection(NodeConnection): routes: type[Routes] = Routes session_cache: Session | None = None + 
+@serializable(attrs=["proxy_target_uid", "url"]) +class HTTPConnection(NodeConnection): + __canonical_name__ = "HTTPConnection" + __version__ = SYFT_OBJECT_VERSION_3 + + url: GridURL + proxy_target_uid: UID | None = None + routes: type[Routes] = Routes + session_cache: Session | None = None + headers: dict[str, str] | None = None + @field_validator("url", mode="before") @classmethod def make_url(cls, v: Any) -> Any: @@ -148,6 +161,9 @@ def make_url(cls, v: Any) -> Any: else v ) + def set_headers(self, headers: dict[str, str]) -> None: + self.headers = headers + def with_proxy(self, proxy_target_uid: UID) -> Self: return HTTPConnection(url=self.url, proxy_target_uid=proxy_target_uid) @@ -185,7 +201,11 @@ def session(self) -> Session: def _make_get(self, path: str, params: dict | None = None) -> bytes: url = self.url.with_path(path) response = self.session.get( - str(url), verify=verify_tls(), proxies={}, params=params + str(url), + headers=self.headers, + verify=verify_tls(), + proxies={}, + params=params, ) if response.status_code != 200: raise requests.ConnectionError( @@ -205,7 +225,12 @@ def _make_post( ) -> bytes: url = self.url.with_path(path) response = self.session.post( - str(url), verify=verify_tls(), json=json, proxies={}, data=data + str(url), + headers=self.headers, + verify=verify_tls(), + json=json, + proxies={}, + data=data, ) if response.status_code != 200: raise requests.ConnectionError( @@ -220,7 +245,7 @@ def _make_post( def stream_data(self, credentials: SyftSigningKey) -> Response: url = self.url.with_path(self.routes.STREAM.value) response = self.session.get( - str(url), verify=verify_tls(), proxies={}, stream=True + str(url), verify=verify_tls(), proxies={}, stream=True, headers=self.headers ) return response @@ -310,6 +335,7 @@ def make_call(self, signed_call: SignedSyftAPICall) -> Any | SyftError: response = requests.post( # nosec url=str(self.api_url), data=msg_bytes, + headers=self.headers, ) if response.status_code != 200: @@ -531,6 +557,15 @@ def post_init(self) -> None: self.metadata.supported_protocols ) + def set_headers(self, headers: dict[str, str]) -> None | SyftError: + if isinstance(self.connection, HTTPConnection): + self.connection.set_headers(headers) + return None + return SyftError( + message="Incompatible connection type." 
+ + f"Expected HTTPConnection, got {type(self.connection)}" + ) + def _get_communication_protocol( self, protocols_supported_by_server: list ) -> int | str: From f02ea4bff400afd4d7028d31d49400532e855405 Mon Sep 17 00:00:00 2001 From: IonesioJunior Date: Mon, 17 Jun 2024 16:01:45 -0300 Subject: [PATCH 173/313] Update protocol --- packages/syft/src/syft/protocol/protocol_version.json | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 375aa1af66b..3f88ac18cf4 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -272,6 +272,13 @@ "hash": "89dbd4a810586b49498be1f5299b565a19871487e14a120433b0a4cf607b6dee", "action": "remove" } + }, + "HTTPConnection": { + "3": { + "version": 3, + "hash": "54b452bb4ab76691ac1e704b62e7bcec740850fea00805145259b37973ecd0f4", + "action": "add" + } } } } From 6071d2738e3a54566c5247565a95b1a6a817966c Mon Sep 17 00:00:00 2001 From: IonesioJunior Date: Mon, 17 Jun 2024 16:06:51 -0300 Subject: [PATCH 174/313] Fix lint issues --- packages/syft/src/syft/client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 59c36487e84..323874336f2 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -561,7 +561,7 @@ def set_headers(self, headers: dict[str, str]) -> None | SyftError: if isinstance(self.connection, HTTPConnection): self.connection.set_headers(headers) return None - return SyftError( + return SyftError( # type: ignore message="Incompatible connection type." + f"Expected HTTPConnection, got {type(self.connection)}" ) From 932220cbbec9527eca5f04182a804fde111a9c11 Mon Sep 17 00:00:00 2001 From: dk Date: Tue, 18 Jun 2024 10:50:19 +0700 Subject: [PATCH 175/313] [syft/dataset] wrap data and mock with `ActionObject.from_obj` before uploading `TwinObject` --- packages/syft/src/syft/client/domain_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/client/domain_client.py b/packages/syft/src/syft/client/domain_client.py index 1c768710040..a23a4c44170 100644 --- a/packages/syft/src/syft/client/domain_client.py +++ b/packages/syft/src/syft/client/domain_client.py @@ -129,8 +129,8 @@ def upload_dataset(self, dataset: CreateDataset) -> SyftSuccess | SyftError: for asset in dataset.asset_list: try: twin = TwinObject( - private_obj=asset.data, - mock_obj=asset.mock, + private_obj=ActionObject.from_obj(asset.data), + mock_obj=ActionObject.from_obj(asset.mock), syft_node_location=self.id, syft_client_verify_key=self.verify_key, ) From 004cde3f796e2c724861f1a0c1127c9d50c80d8e Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Tue, 18 Jun 2024 14:03:17 +1000 Subject: [PATCH 176/313] Replacing type serde --- packages/syft/src/syft/serde/third_party.py | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/packages/syft/src/syft/serde/third_party.py b/packages/syft/src/syft/serde/third_party.py index 4e94219951e..cf025aedb9c 100644 --- a/packages/syft/src/syft/serde/third_party.py +++ b/packages/syft/src/syft/serde/third_party.py @@ -183,20 +183,8 @@ def serialize_bytes_io(io: BytesIO) -> bytes: import torch from torch._C import _TensorMeta - def serialize_torch_tensor_meta(t: _TensorMeta) -> bytes: - buffer = BytesIO() - torch.save(t, buffer) - return 
buffer.getvalue() - - def deserialize_torch_tensor_meta(buf: bytes) -> _TensorMeta: - buffer = BytesIO(buf) - return torch.load(buffer) - - recursive_serde_register( - _TensorMeta, - serialize=serialize_torch_tensor_meta, - deserialize=deserialize_torch_tensor_meta, - ) + recursive_serde_register_type(_TensorMeta) + recursive_serde_register_type(torch.Tensor) def torch_serialize(tensor: torch.Tensor) -> bytes: return numpy_serialize(tensor.numpy()) From c4c0c48f5a8fc4e3918eb5502a9bfafd81130345 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Tue, 18 Jun 2024 08:28:13 +0200 Subject: [PATCH 177/313] rm nb --- notebooks/real_jobs_ux.ipynb | 5276 ---------------------------------- 1 file changed, 5276 deletions(-) delete mode 100644 notebooks/real_jobs_ux.ipynb diff --git a/notebooks/real_jobs_ux.ipynb b/notebooks/real_jobs_ux.ipynb deleted file mode 100644 index 24b8685e542..00000000000 --- a/notebooks/real_jobs_ux.ipynb +++ /dev/null @@ -1,5276 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "# syft absolute\n", - "import syft as sy" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Staging Protocol Changes...\n", - "Creating default worker image with tag='local-dev'\n", - "Building default worker image with tag=local-dev\n", - "Setting up worker poolname=default-pool workers=4 image_uid=533f068bce124b228c6f21262613832f in_memory=True\n", - "Created default worker pool.\n", - "Data Migrated to latest version !!!\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "22/04/24 17:10:40 FUNCTION LOG (9a1cd5d70dde491ba6900ad2d9823d91): Job Iter 0\n", - "22/04/24 17:10:40 FUNCTION LOG (9a1cd5d70dde491ba6900ad2d9823d91): Job Iter 1\n", - "22/04/24 17:10:42 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 0\n", - "22/04/24 17:10:43 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 0\n", - "22/04/24 17:10:44 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 1\n", - "22/04/24 17:10:44 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 1\n", - "22/04/24 17:10:45 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 2\n", - "22/04/24 17:10:46 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 2\n", - "22/04/24 17:10:46 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 3\n", - "22/04/24 17:10:47 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 3\n", - "22/04/24 17:10:47 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 4\n", - "22/04/24 17:10:48 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 4\n", - "22/04/24 17:10:48 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 5\n", - "22/04/24 17:10:49 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 5\n", - "22/04/24 17:10:49 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 6\n", - "22/04/24 17:10:50 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 6\n", - "22/04/24 17:10:50 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 7\n", - "22/04/24 17:10:51 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 7\n", - "22/04/24 17:10:51 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 8\n", - "22/04/24 17:10:52 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 8\n", - "22/04/24 17:10:52 FUNCTION LOG (fd88bc3e255842a49ce329069eda2f1b): Subjob Iter 9\n", - 
"22/04/24 17:10:53 FUNCTION LOG (ccb8f5413883402db1f3d2c8965681c9): Subjob Iter 9\n" - ] - } - ], - "source": [ - "node = sy.orchestra.launch(\n", - " name=\"test\",\n", - " dev_mode=True,\n", - " reset=True,\n", - " local_db=True,\n", - " n_consumers=4,\n", - " create_producer=True,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "domain_client = node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "# domain_client = sy.login(email=\"info@openmined.org\", password=\"changethis\", port=8080)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'subjob' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'subjob' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@sy.syft_function()\n", - "def subjob(domain):\n", - " # stdlib\n", - " import time\n", - "\n", - " n_iters = 10\n", - " domain.init_progress(n_iters=n_iters)\n", - " for i in range(n_iters):\n", - " time.sleep(1)\n", - " print(f\"Subjob Iter {i}\")\n", - " domain.increment_progress()\n", - " return \"Done\"" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: User Code Submitted

" - ], - "text/plain": [ - "SyftSuccess: User Code Submitted" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "domain_client.code.submit(subjob)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess: Syft function 'job' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'job' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "@sy.syft_function_single_use()\n", - "def job(domain):\n", - " # stdlib\n", - " import time\n", - "\n", - " n_iters = 2\n", - " domain.init_progress(n_iters=n_iters)\n", - " for i in range(n_iters):\n", - " _ = domain.launch_job(subjob)\n", - " time.sleep(0.1)\n", - " print(f\"Job Iter {i}\")\n", - " domain.increment_progress()\n", - " return \"Done\"" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - "
\n", - "

Request

\n", - "

Id: 9a38e069840b4c42a45b24d6dcf0dd89

\n", - "

Request time: 2024-04-22 13:10:38

\n", - " \n", - " \n", - "

Status: RequestStatus.PENDING

\n", - "

Requested on: Test of type Domain

\n", - "

Requested by: Jane Doe (info@openmined.org)

\n", - "

Changes: Request to change job (Pool Id: default-pool) to permission RequestStatus.APPROVED. Nested Requests not resolved.

\n", - "
\n", - "\n", - " " - ], - "text/markdown": [ - "```python\n", - "class Request:\n", - " id: str = 9a38e069840b4c42a45b24d6dcf0dd89\n", - " request_time: str = 2024-04-22 13:10:38\n", - " updated_at: str = None\n", - " status: str = RequestStatus.PENDING\n", - " changes: str = ['Request to change job (Pool Id: default-pool) to permission RequestStatus.APPROVED. Nested Requests not resolved']\n", - " requesting_user_verify_key: str = 67d3b5eaf0c0bf6b5a602d359daecc86a7a74053490ec37ae08e71360587c870\n", - "\n", - "```" - ], - "text/plain": [ - "syft.service.request.request.Request" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "domain_client.code.request_code_execution(job)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Approving request for domain test\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftSuccess: Request 9a38e069840b4c42a45b24d6dcf0dd89 changes applied

" - ], - "text/plain": [ - "SyftSuccess: Request 9a38e069840b4c42a45b24d6dcf0dd89 changes applied" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "domain_client.requests[-1].approve(approve_nested=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "res = domain_client.code.job(blocking=False)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - "\n", - "\n", - " \n", - "\n", - "\n", - "\n", - " \n", - " \n", - " \n", - "
\n", - "
test/jobs/
\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - " \n", - " JOB\n", - "
\n", - " job\n", - "
\n", - " \n", - " \n", - "
\n", - " #9a1cd5d70dde491ba6900ad2d9823d91\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " UserCode:\n", - " job\n", - "
\n", - "
\n", - " Status:\n", - " Created\n", - "
\n", - "
\n", - " \n", - " Started At:\n", - " 2024-04-22 17:10:39 by Jane Doe info@openmined.org\n", - "
\n", - "
\n", - " \n", - " Updated At:\n", - " --\n", - "
\n", - " \n", - "
\n", - " Subjobs:\n", - " 0\n", - "
\n", - "
\n", - "
\n", - " \n", - " \n", - "
\n", - "
\n", - " \n", - " \n", - "
\n", - "
\n", - "
\n", - "
\n", - " syft.service.action.action_data_empty.ObjectNotReady\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
\n", - " " - ], - "text/markdown": [ - "```python\n", - "class Job:\n", - " id: UID = 9a1cd5d70dde491ba6900ad2d9823d91\n", - " status: JobStatus.CREATED\n", - " has_parent: False\n", - " result: syft.service.action.action_data_empty.ObjectNotReady\n", - " logs:\n", - "\n", - "0 \n", - " \n", - "```" - ], - "text/plain": [ - "syft.service.job.job_stash.Job" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "x = res.wait()" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "```python\n", - "Pointer\n", - "'Done'```\n" - ], - "text/plain": [ - "Pointer:\n", - "'Done'" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "x" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "

Job List

\n", - "
\n", - "\n", - "
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - "\n", - "

0

\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n" - ], - "text/plain": [ - "" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "domain_client.jobs" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - "
\n", - "

Job List

\n", - "
\n", - "\n", - "
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - "\n", - "

0

\n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n", - " \n", - "
\n", - "
\n" - ], - "text/plain": [ - "" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "domain_client.jobs" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [], - "source": [ - "subjob = domain_client.jobs[1]" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - "\n", - "\n", - " \n", - "\n", - "\n", - "\n", - " \n", - " \n", - " \n", - "
-    "    [HTML Job widget markup omitted; see the text/markdown repr below.]\n",
\n", - " " - ], - "text/markdown": [ - "```python\n", - "class Job:\n", - " id: UID = 9a1cd5d70dde491ba6900ad2d9823d91\n", - " status: JobStatus.COMPLETED\n", - " has_parent: False\n", - " result: Done\n", - " logs:\n", - "\n", - "0 Job Iter 0\n", - "1 Job Iter 1\n", - "JOB COMPLETED\n", - " \n", - "```" - ], - "text/plain": [ - "syft.service.job.job_stash.Job" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Job Iter 0\n", - "Job Iter 1\n", - "\n", - "\n" - ] - } - ], - "source": [ - "res.logs()" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [], - "source": [ - "j = domain_client.jobs[1]" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - "\n", - "\n", - " \n", - "\n", - "\n", - "\n", - " \n", - " \n", - " \n", - "
-    "    [HTML Subjob widget markup omitted; see the text/markdown repr below.]\n",
\n", - " " - ], - "text/markdown": [ - "```python\n", - "class Job:\n", - " id: UID = fd88bc3e255842a49ce329069eda2f1b\n", - " status: JobStatus.PROCESSING\n", - " has_parent: True\n", - " result: syft.service.action.action_data_empty.ObjectNotReady\n", - " logs:\n", - "\n", - "0 Subjob Iter 0\n", - "1 Subjob Iter 1\n", - "2 Subjob Iter 2\n", - " \n", - "```" - ], - "text/plain": [ - "syft.service.job.job_stash.Job" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "j" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "# subjob" - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "
-    "    [HTML 'Job List' table markup omitted; see the text/plain repr below.]\n",
\n" - ], - "text/plain": [ - "[syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job,\n", - " syft.service.job.job_stash.Job]" - ] - }, - "execution_count": 27, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res.subjobs" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": {}, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} From 34b8a846bf80d5e2035c71e1bd927e2323ab2073 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Tue, 18 Jun 2024 09:09:11 +0200 Subject: [PATCH 178/313] add code to repr --- packages/syft/src/syft/client/api.py | 4 +++- packages/syft/src/syft/service/code/user_code.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index cc4c14748fb..37e176439e4 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -1183,7 +1183,9 @@ def __repr__(self) -> str: if hasattr(module_or_func, "_modules"): for func_name in module_or_func._modules: func = getattr(module_or_func, func_name) - sig = func.__ipython_inspector_signature_override__ + sig = getattr( + func, "__ipython_inspector_signature_override__", "" + ) _repr_str += f"{module_path_str}.{func_name}{sig}\n\n" return _repr_str diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 832379fb869..244fb162d19 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -305,6 +305,7 @@ class UserCode(SyncableSyftObject): "input_owners", "code_status", "worker_pool_name", + "raw_code", ] __exclude_sync_diff_attrs__: ClassVar[list[str]] = [ From f3b3a0e57bdacacd5a55e93eb3444c889d69bb94 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 18 Jun 2024 10:20:09 +0300 Subject: [PATCH 179/313] change low_side to l0_deployment --- packages/syft/src/syft/client/syncing.py | 10 --------- .../syft/src/syft/service/code/user_code.py | 20 +++++++++++------ .../syft/service/code/user_code_service.py | 19 +++++++++------- .../syft/src/syft/service/request/request.py | 22 ++++++++++--------- 4 files changed, 36 insertions(+), 35 deletions(-) diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index 25dea439e8f..156866b26ff 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -129,15 +129,6 @@ def handle_sync_batch( src_client = obj_diff_batch.source_client tgt_client = obj_diff_batch.target_client - - # make sure dependent request is approved 
before syncing the job - # if obj_diff_batch.root_type == Job and sync_direction == SyncDirection.HIGH_TO_LOW: - # job = obj_diff_batch.root.get_obj() - # requests = [r for r in src_client.requests if r.code_id == job.user_code_id] - # # NOTE: how to handle 0 or multiple requests? - # if requests: - # requests[0].approve() - src_resolved_state, tgt_resolved_state = obj_diff_batch.create_new_resolved_states() obj_diff_batch.decision = decision @@ -152,7 +143,6 @@ def handle_sync_batch( or obj_diff_batch.user_verify_key_high ) share_private_data_for_diff = share_private_data[diff.object_id] - # add sync private option to execution output mockify_for_diff = mockify[diff.object_id] instruction = SyncInstruction.from_batch_decision( diff=diff, diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index cacf1aa3db3..7fcc7943583 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -62,6 +62,7 @@ from ...util.colors import SURFACE from ...util.markdown import CodeMarkdown from ...util.markdown import as_markdown_code +from ...util.util import prompt_warning_message from ..action.action_endpoint import CustomEndpointActionObject from ..action.action_object import Action from ..action.action_object import ActionObject @@ -395,11 +396,11 @@ def _coll_repr_(self) -> dict[str, Any]: } @property - def is_low_side(self) -> bool: + def is_l0_deployment(self) -> bool: return self.origin_node_side_type == NodeSideType.LOW_SIDE @property - def is_high_side(self) -> bool: + def is_l2_deployment(self) -> bool: return self.origin_node_side_type == NodeSideType.HIGH_SIDE @property @@ -414,7 +415,7 @@ def user(self) -> UserView | SyftError: ) return api.services.user.get_by_verify_key(self.user_verify_key) - def _status_from_output_history( + def _compute_status_l0( self, context: AuthedServiceContext | None = None ) -> UserCodeStatusCollection | SyftError: if context is None: @@ -439,6 +440,11 @@ def _status_from_output_history( is_denied = self.l0_deny_reason is not None if is_denied: + if is_approved: + prompt_warning_message( + "This request has been approved and " + "the output is shared with the owner of the request." + ) message = self.l0_deny_reason status = (UserCodeStatus.DENIED, message) elif is_approved: @@ -456,12 +462,12 @@ def _status_from_output_history( def status(self) -> UserCodeStatusCollection | SyftError: # Clientside only - if self.is_low_side: + if self.is_l0_deployment: if self.status_link is not None: return SyftError( message="Encountered a low side UserCode object with a status_link." ) - return self._status_from_output_history() + return self._compute_status_l0() if self.status_link is None: return SyftError( @@ -473,12 +479,12 @@ def status(self) -> UserCodeStatusCollection | SyftError: def get_status( self, context: AuthedServiceContext ) -> UserCodeStatusCollection | SyftError: - if self.origin_node_side_type == NodeSideType.LOW_SIDE: + if self.is_l0_deployment: if self.status_link is not None: return SyftError( message="Encountered a low side UserCode object with a status_link." ) - return self._status_from_output_history(context) + return self._compute_status_l0(context) if self.status_link is None: return SyftError( message="This UserCode does not have a status. Please contact the Admin." 
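(Aside on the `_compute_status_l0` change above: in an L0 deployment the low-side status is never stored, it is derived on the fly. An admin-set `l0_deny_reason` always wins, a non-empty output history implies approval, and anything else is still pending. The sketch below is a minimal, self-contained restatement of that decision table; `Status` and the `warnings` call are simplified stand-ins for Syft's `UserCodeStatusCollection` and `prompt_warning_message`, not the real types.)

```python
from enum import Enum
import warnings


class Status(Enum):
    # stand-in for syft's UserCodeStatus
    PENDING = "pending"
    APPROVED = "approved"
    DENIED = "denied"


def compute_status_l0(
    output_history: list, l0_deny_reason: str | None
) -> tuple[Status, str]:
    """Derive an L0 (low-side) status instead of reading a stored one."""
    is_approved = len(output_history) > 0
    if l0_deny_reason is not None:
        if is_approved:
            # results were already synced to the data scientist; denying
            # now cannot revoke access to them, so warn the admin
            warnings.warn(
                "This request has been approved and the output is "
                "shared with the owner of the request."
            )
        return Status.DENIED, l0_deny_reason
    if is_approved:
        return Status.APPROVED, ""
    return Status.PENDING, ""
```

Deriving rather than storing the status keeps the low and high side consistent after every sync: approval is exactly "shared results exist", which is why the low-side request flow computes status instead of setting it manually.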
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index b6970d31a03..9287e49fec4 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -227,7 +227,7 @@ def _request_code_execution_inner( code_link = LinkedObject.from_obj(user_code, node_uid=context.node.id) # Requests made on low side are synced, and have their status computed instead of set manually. - if user_code.is_low_side: + if user_code.is_l0_deployment: status_change = SyncedUserCodeStatusChange( value=UserCodeStatus.APPROVED, linked_obj=user_code.status_link, @@ -517,18 +517,16 @@ def _call( output_policy=output_policy, ) if not can_execute: - # We don't check output policy if + # We check output policy only in l2 deployment. # code is from low side (L0 setup) - if not code.is_low_side and not code.is_output_policy_approved( - context - ): + if not code.is_output_policy_approved(context): return Err( "Execution denied: Your code is waiting for approval" ) is_valid = ( output_policy._is_valid(context) if output_policy else False ) - if not is_valid or code.is_low_side: + if not is_valid or code.is_l0_deployment: if len(output_history) > 0 and not skip_read_cache: last_executed_output = output_history[-1] # Check if the inputs of the last executed output match @@ -555,10 +553,15 @@ def _call( return result res = delist_if_single(result.ok()) + output_policy_message = "" + if code.is_l2_deployment: + # Skip output policy warning in L0 setup; + # admin overrides policy checks. + output_policy_message = is_valid.message return Ok( CachedSyftObject( result=res, - error_msg="", + error_msg=output_policy_message, ) ) else: @@ -592,7 +595,7 @@ def _call( # this currently only works for nested syft_functions # and admins executing on high side (TODO, decide if we want to increment counter) # always store_execution_output on l0 setup - is_l0_request = context.role == ServiceRole.ADMIN and code.is_low_side + is_l0_request = context.role == ServiceRole.ADMIN and code.is_l0_deployment if not skip_fill_cache and output_policy is not None or is_l0_request: res = code.store_execution_output( context=context, diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 88f9e0df271..bde75c1a1d3 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -579,8 +579,10 @@ def icon(self) -> str: return Icon.REQUEST.svg def get_status(self, context: AuthedServiceContext | None = None) -> RequestStatus: - is_low_side = self.get_is_low_side(context) if context else self.is_low_side - if is_low_side: + is_l0_deployment = ( + self.get_is_l0_deployment(context) if context else self.is_l0_deployment + ) + if is_l0_deployment: code_status = self.code.get_status(context) if context else self.code.status return RequestStatus.from_usercode_status(code_status) @@ -611,7 +613,7 @@ def approve( if isinstance(api, SyftError): return api - if self.is_low_side: + if self.is_l0_deployment: return SyftError( message="This request is a low-side request. Please sync your results to approve." 
) @@ -662,7 +664,7 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: if isinstance(api, SyftError): return api - if self.is_low_side: + if self.is_l0_deployment: result = api.code.update(id=self.code_id, l0_deny_reason=reason) if isinstance(result, SyftError): return result @@ -671,18 +673,18 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: return api.services.request.undo(uid=self.id, reason=reason) @property - def is_low_side(self) -> bool: - return bool(self.code) and self.code.is_low_side + def is_l0_deployment(self) -> bool: + return bool(self.code) and self.code.is_l0_deployment - def get_is_low_side(self, context: AuthedServiceContext) -> bool: + def get_is_l0_deployment(self, context: AuthedServiceContext) -> bool: code = self.get_user_code(context) if code: - return code.is_low_side + return code.is_l0_deployment else: return False def approve_with_client(self, client: SyftClient) -> Result[SyftSuccess, SyftError]: - if self.is_low_side: + if self.is_l0_deployment: return SyftError( message="This request is a low-side request. Please sync your results to approve." ) @@ -845,7 +847,7 @@ def deposit_result( if isinstance(code, SyftError): return code - if not self.is_low_side: + if not self.is_l0_deployment: return SyftError( message="deposit_result is only available for low side code requests. " "Please use request.approve() instead." From 2a0a4eb299cdebca0033267b14e2823d7910bca7 Mon Sep 17 00:00:00 2001 From: dk Date: Tue, 18 Jun 2024 15:18:51 +0700 Subject: [PATCH 180/313] [syft/tests] test uploading assetes with different data types: int | str | dict | set | np.ndarray | pd.DataFrame | torch.Tensor --- .../service/dataset/dataset_service_test.py | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/packages/syft/tests/syft/service/dataset/dataset_service_test.py b/packages/syft/tests/syft/service/dataset/dataset_service_test.py index a60bc653c13..1aa85c07865 100644 --- a/packages/syft/tests/syft/service/dataset/dataset_service_test.py +++ b/packages/syft/tests/syft/service/dataset/dataset_service_test.py @@ -5,8 +5,10 @@ # third party import numpy as np +import pandas as pd from pydantic import ValidationError import pytest +import torch # syft absolute import syft as sy @@ -253,3 +255,49 @@ def test_adding_contributors_with_duplicate_email(): assert isinstance(res3, SyftSuccess) assert isinstance(res4, SyftError) assert len(asset.contributors) == 1 + + +@pytest.fixture( + params=[ + 1, + "hello", + {"key": "value"}, + {1, 2, 3}, + np.array([1, 2, 3]), + pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]}), + torch.Tensor([1, 2, 3]), + ] +) +def different_data_types( + request, +) -> int | str | dict | set | np.ndarray | pd.DataFrame | torch.Tensor: + return request.param + + +def test_upload_dataset_with_assets_of_different_data_types( + worker: Worker, + different_data_types: int + | str + | dict + | set + | np.ndarray + | pd.DataFrame + | torch.Tensor, +) -> None: + asset = sy.Asset( + name=random_hash(), + data=different_data_types, + mock=different_data_types, + ) + dataset = Dataset(name=random_hash()) + dataset.add_asset(asset) + root_domain_client = worker.root_client + res = root_domain_client.upload_dataset(dataset) + assert isinstance(res, SyftSuccess) + assert len(root_domain_client.api.services.dataset.get_all()) == 1 + assert type(root_domain_client.datasets[0].assets[0].data) == type( + different_data_types + ) + assert type(root_domain_client.datasets[0].assets[0].mock) == type( + different_data_types + ) From 
9559298d78eb8e259daef2d7e8b2caeb40d320fe Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 18 Jun 2024 11:37:37 +0300 Subject: [PATCH 181/313] add warning message --- packages/syft/src/syft/service/code/user_code.py | 4 ++-- packages/syft/src/syft/service/request/request.py | 5 +++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 657e2c29eec..9ee7e09e7e9 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -443,8 +443,8 @@ def _compute_status_l0( if is_denied: if is_approved: prompt_warning_message( - "This request has been approved and " - "the output is shared with the owner of the request." + "This request already has results published to the data scientist. " + "They will still be able to access those results." ) message = self.l0_deny_reason status = (UserCodeStatus.DENIED, message) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index bde75c1a1d3..6ec41fd1bdb 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -665,6 +665,11 @@ def deny(self, reason: str) -> SyftSuccess | SyftError: return api if self.is_l0_deployment: + if self.status == RequestStatus.APPROVED: + prompt_warning_message( + "This request already has results published to the data scientist. " + "They will still be able to access those results." + ) result = api.code.update(id=self.code_id, l0_deny_reason=reason) if isinstance(result, SyftError): return result From 3dc46bf781c6aeff034a51abc7c9e1b565232f15 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Tue, 18 Jun 2024 12:55:09 +0200 Subject: [PATCH 182/313] add test --- .../syft/src/syft/service/code/user_code.py | 74 +++++++++++++++---- packages/syft/src/syft/service/response.py | 4 +- .../syft/tests/syft/users/user_code_test.py | 35 +++++++++ 3 files changed, 99 insertions(+), 14 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 9ee7e09e7e9..f771cf9a9c5 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -12,6 +12,7 @@ import inspect from io import StringIO import itertools +import keyword import random import sys from textwrap import dedent @@ -26,8 +27,11 @@ # third party from IPython.display import display +from pydantic import ValidationError from pydantic import field_validator from result import Err +from result import Ok +from result import Result from typing_extensions import Self # relative @@ -90,6 +94,7 @@ from ..response import SyftNotReady from ..response import SyftSuccess from ..response import SyftWarning +from ..service import ServiceConfigRegistry from ..user.user import UserView from ..user.user_roles import ServiceRole from .code_parse import GlobalsVisitor @@ -363,6 +368,14 @@ class UserCode(SyncableSyftObject): "output_policy_state", ] + @field_validator("service_func_name", mode="after") + @classmethod + def service_func_name_is_valid(cls, value: str) -> str: + res = is_valid_usercode_name(value) + if res.is_err(): + raise ValueError(res.err_value) + return value + def __setattr__(self, key: str, value: Any) -> None: # Get the attribute from the class, it might be a descriptor or None attr = getattr(type(self), key, None) @@ -908,6 +921,14 @@ class 
SubmitUserCode(SyftObject): __repr_attrs__ = ["func_name", "code"] + @field_validator("func_name", mode="after") + @classmethod + def func_name_is_valid(cls, value: str) -> str: + res = is_valid_usercode_name(value) + if res.is_err(): + raise ValueError(res.err_value) + return value + @field_validator("output_policy_init_kwargs", mode="after") @classmethod def add_output_policy_ids(cls, values: Any) -> Any: @@ -1070,6 +1091,24 @@ def input_owner_verify_keys(self) -> list[str] | None: return None +def is_valid_usercode_name(func_name: str) -> Result[Any, str]: + if len(func_name) == 0: + return Err("Function name cannot be empty") + if func_name == "_": + return Err("Cannot use anonymous function as syft function") + if not str.isidentifier(func_name): + return Err("Function name must be a valid Python identifier") + if keyword.iskeyword(func_name): + return Err("Function name is a reserved python keyword") + + service_method_path = f"code.{func_name}" + if ServiceConfigRegistry.path_exists(service_method_path): + return Err( + f"Could not create syft function with name {func_name}: a service with the same name already exists" + ) + return Ok(None) + + class ArgumentType(Enum): REAL = 1 MOCK = 2 @@ -1128,19 +1167,28 @@ def syft_function( else: output_policy_type = type(output_policy) - def decorator(f: Any) -> SubmitUserCode: - res = SubmitUserCode( - code=dedent(inspect.getsource(f)), - func_name=f.__name__, - signature=inspect.signature(f), - input_policy_type=input_policy_type, - input_policy_init_kwargs=init_input_kwargs, - output_policy_type=output_policy_type, - output_policy_init_kwargs=getattr(output_policy, "init_kwargs", {}), - local_function=f, - input_kwargs=f.__code__.co_varnames[: f.__code__.co_argcount], - worker_pool_name=worker_pool_name, - ) + def decorator(f: Any) -> SubmitUserCode | SyftError: + try: + res = SubmitUserCode( + code=dedent(inspect.getsource(f)), + func_name=f.__name__, + signature=inspect.signature(f), + input_policy_type=input_policy_type, + input_policy_init_kwargs=init_input_kwargs, + output_policy_type=output_policy_type, + output_policy_init_kwargs=getattr(output_policy, "init_kwargs", {}), + local_function=f, + input_kwargs=f.__code__.co_varnames[: f.__code__.co_argcount], + worker_pool_name=worker_pool_name, + ) + except ValidationError as e: + errors = e.errors() + msg = "Failed to create syft function, encountered validation errors:\n" + for error in errors: + msg += f"\t{error['msg']}\n" + err = SyftError(message=msg) + display(err) + return err if share_results_with_owners and res.output_policy_init_kwargs is not None: res.output_policy_init_kwargs["output_readers"] = ( diff --git a/packages/syft/src/syft/service/response.py b/packages/syft/src/syft/service/response.py index 37227046c5c..723970cdfff 100644 --- a/packages/syft/src/syft/service/response.py +++ b/packages/syft/src/syft/service/response.py @@ -42,7 +42,9 @@ def _repr_html_class_(self) -> str: def _repr_html_(self) -> str: return ( f'
' - + f"{type(self).__name__}: {self.message}

" + f"{type(self).__name__}: " + f'
'
+            f"{self.message}

" ) diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py index f006525097e..333d246e37f 100644 --- a/packages/syft/tests/syft/users/user_code_test.py +++ b/packages/syft/tests/syft/users/user_code_test.py @@ -4,6 +4,8 @@ # third party from faker import Faker import numpy as np +from pydantic import ValidationError +import pytest # syft absolute import syft as sy @@ -12,6 +14,7 @@ from syft.service.request.request import Request from syft.service.request.request import UserCodeStatusChange from syft.service.response import SyftError +from syft.service.response import SyftSuccess from syft.service.user.user import User @@ -331,3 +334,35 @@ def compute_sum(): result = ds_client.api.services.code.compute_sum() assert result, result assert result.get() == 1 + + +def test_submit_invalid_name(worker) -> None: + client = worker.root_client + + @sy.syft_function_single_use() + def valid_name(): + pass + + res = client.code.submit(valid_name) + assert isinstance(res, SyftSuccess) + + @sy.syft_function_single_use() + def get_all(): + pass + + assert isinstance(get_all, SyftError) + + @sy.syft_function_single_use() + def _(): + pass + + assert isinstance(_, SyftError) + + # overwrite valid function name before submit, fail on serde + @sy.syft_function_single_use() + def valid_name_2(): + pass + + valid_name_2.func_name = "get_all" + with pytest.raises(ValidationError): + client.code.submit(valid_name_2) From db1e59bb70e8c122da3d4b8c5e547bcff57b4d78 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Tue, 18 Jun 2024 13:05:43 +0200 Subject: [PATCH 183/313] fix test --- .../tests/syft/service/sync/sync_resolve_single_test.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py index 868f9f5203d..0bd022ae604 100644 --- a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py +++ b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py @@ -156,7 +156,7 @@ def compute() -> int: _ = client_low_ds.code.request_code_execution(compute) - diff = compare_clients(low_client, high_client) + diff = compare_clients(low_client, high_client, hide_usercode=False) assert len(diff.batches) == 2 # Request + UserCode assert len(diff.ignored_batches) == 0 @@ -165,7 +165,7 @@ def compute() -> int: res = diff[0].ignore() assert isinstance(res, SyftSuccess) - diff = compare_clients(low_client, high_client) + diff = compare_clients(low_client, high_client, hide_usercode=False) assert len(diff.batches) == 0 assert len(diff.ignored_batches) == 2 assert len(diff.all_batches) == 2 @@ -174,7 +174,7 @@ def compute() -> int: res = diff.ignored_batches[0].unignore() assert isinstance(res, SyftSuccess) - diff = compare_clients(low_client, high_client) + diff = compare_clients(low_client, high_client, hide_usercode=False) assert len(diff.batches) == 1 assert len(diff.ignored_batches) == 1 assert len(diff.all_batches) == 2 From 996a3d27df3cbd193f23e3ddb47f08f57c76a452 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Tue, 18 Jun 2024 16:20:15 +0200 Subject: [PATCH 184/313] add sync function, make functions available in toplevel module --- packages/syft/src/syft/__init__.py | 3 +++ packages/syft/src/syft/client/syncing.py | 26 ++++++++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index d7183898935..5e85ea120e4 100644 --- 
a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -22,6 +22,9 @@ from .client.registry import NetworkRegistry # noqa: F401 from .client.search import Search # noqa: F401 from .client.search import SearchResults # noqa: F401 +from .client.syncing import compare_clients # noqa: F401 +from .client.syncing import compare_states # noqa: F401 +from .client.syncing import sync # noqa: F401 from .client.user_settings import UserSettings # noqa: F401 from .client.user_settings import settings # noqa: F401 from .custom_worker.config import DockerWorkerConfig # noqa: F401 diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index c0b4dd8196e..f6b07fc8da9 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -22,6 +22,32 @@ from .sync_decision import SyncDirection +def sync( + from_client: DomainClient, + to_client: DomainClient, + include_ignored: bool = False, + include_same: bool = False, + filter_by_email: str | None = None, + include_types: Collection[str | type] | None = None, + exclude_types: Collection[str | type] | None = None, + hide_usercode: bool = True, +) -> PaginatedResolveWidget | SyftError | SyftSuccess: + diff = compare_clients( + from_client=from_client, + to_client=to_client, + include_ignored=include_ignored, + include_same=include_same, + filter_by_email=filter_by_email, + include_types=include_types, + exclude_types=exclude_types, + hide_usercode=hide_usercode, + ) + if isinstance(diff, SyftError): + return diff + + return diff.resolve() + + def compare_states( from_state: SyncState, to_state: SyncState, From ec4cab59dc3e67d887c9de32629db643fa6f100d Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Tue, 18 Jun 2024 16:25:22 +0200 Subject: [PATCH 185/313] update usercode warning --- packages/syft/src/syft/client/syncing.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index f6b07fc8da9..1a4add6f359 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -80,8 +80,8 @@ def compare_states( if hide_usercode: prompt_warning_message( - "User code is hidden by default, as they are also part of the Request." - " If you want to include them, set hide_usercode=False." + "UserCodes are hidden by default, and are part of the Requests." 
+ " If you want to include them as separate objects, set `hide_usercode=False`" ) exclude_types = exclude_types or [] exclude_types.append("usercode") From 1117d97c4dd0075517312776e1b162507d3e9909 Mon Sep 17 00:00:00 2001 From: Julian Cardonnet Date: Tue, 18 Jun 2024 11:31:42 -0300 Subject: [PATCH 186/313] Add livenessProbe and startupProbe to worker pool containers --- .../syft/src/syft/custom_worker/runner_k8s.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/packages/syft/src/syft/custom_worker/runner_k8s.py b/packages/syft/src/syft/custom_worker/runner_k8s.py index 3e739ef4fdb..edd7e5892bf 100644 --- a/packages/syft/src/syft/custom_worker/runner_k8s.py +++ b/packages/syft/src/syft/custom_worker/runner_k8s.py @@ -230,6 +230,24 @@ def _create_stateful_set( "image": tag, "env": env_vars, "volumeMounts": volume_mounts, + "livenessProbe": { + "httpGet": { + "path": "/api/v2/?probe=livenessProbe", + "port": 80, + }, + "initialDelaySeconds": 30, + "periodSeconds": 15, + "timeoutSeconds": 5, + "failureThreshold": 3, + }, + "startupProbe": { + "httpGet": { + "path": "/api/v2/metadata?probe=startupProbe", + "port": 80, + }, + "failureThreshold": 30, + "periodSeconds": 10, + }, } ], "volumes": volumes, From 011da3c1676fe7875c998170a8bb38735a6e8a1b Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 18 Jun 2024 18:06:41 +0300 Subject: [PATCH 187/313] implement UserCode.__call__ --- .../syft/src/syft/service/code/user_code.py | 18 +++++++++++++++++- .../syft/service/code_history/code_history.py | 6 +++++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 9ee7e09e7e9..57943fd8b81 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -33,7 +33,7 @@ # relative from ...abstract_node import NodeSideType from ...abstract_node import NodeType -from ...client.api import APIRegistry +from ...client.api import APIRegistry, generate_remote_function from ...client.api import NodeIdentity from ...client.enclave_client import EnclaveMetadata from ...node.credentials import SyftVerifyKey @@ -879,6 +879,22 @@ def show_code_cell(self) -> None: ip = get_ipython() ip.set_next_input(warning_message + self.raw_code) + def __call__(self, *args: Any, **kwargs: Any) -> Any: + api = self._get_api() + if isinstance(api, SyftError): + return api + remote_user_function = generate_remote_function( + api=api, + node_uid=self.node_uid, + signature=self.signature, + path="code.call", + make_call=api.make_call, + pre_kwargs={"uid": self.id}, + warning=None, + communication_protocol=api.communication_protocol, + ) + return remote_user_function() + class UserCodeUpdate(PartialSyftObject): __canonical_name__ = "UserCodeUpdate" diff --git a/packages/syft/src/syft/service/code_history/code_history.py b/packages/syft/src/syft/service/code_history/code_history.py index b5e893c87bf..55f041572f6 100644 --- a/packages/syft/src/syft/service/code_history/code_history.py +++ b/packages/syft/src/syft/service/code_history/code_history.py @@ -75,7 +75,11 @@ def __getitem__(self, index: int | str) -> UserCode | SyftError: return SyftError( message=f"Can't access the api. 
You must login to {self.node_uid}" ) - if api.user_role.value >= ServiceRole.DATA_OWNER.value and index < 0: + if ( + api.user.get_current_user().role.value >= ServiceRole.DATA_OWNER.value + and index < 0 + ): + # negative index would dynamically resolve to a different version return SyftError( message="For security concerns we do not allow negative indexing. \ Try using absolute values when indexing" From cadea3d5fb762f3a6e92844357e718b40efd625b Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Tue, 18 Jun 2024 18:20:46 +0200 Subject: [PATCH 188/313] init function factories --- packages/syft/src/syft/service/api/api.py | 27 ++++++++++++++++--- .../syft/src/syft/service/code/user_code.py | 9 +++++-- 2 files changed, 31 insertions(+), 5 deletions(-) diff --git a/packages/syft/src/syft/service/api/api.py b/packages/syft/src/syft/service/api/api.py index 6977f29e8c7..27b84da1ef8 100644 --- a/packages/syft/src/syft/service/api/api.py +++ b/packages/syft/src/syft/service/api/api.py @@ -1,6 +1,7 @@ # stdlib import ast from collections.abc import Callable +import linecache import inspect from inspect import Signature import keyword @@ -16,6 +17,7 @@ from result import Err from result import Ok from result import Result +from syft.client.client import SyftClient # relative from ...abstract_node import AbstractNode @@ -54,6 +56,7 @@ class TwinAPIAuthedContext(AuthedServiceContext): settings: dict[str, Any] | None = None code: HelperFunctionSet | None = None state: dict[Any, Any] | None = None + admin_client: SyftClient | None = None @serializable() @@ -72,6 +75,14 @@ def get_signature(func: Callable) -> Signature: sig = signature_remove_context(sig) return sig +def register_fn_in_linecache(fname: str, src: str) -> None: + """adds a function to linecache, such that inspect.getsource works for functions nested in this function. 
+ This only works if the same function is compiled under the same filename""" + lines = [ + line + "\n" for line in src.splitlines() + ] # use same splitting method same as linecache 112 (py3.12) + linecache.cache[fname] = (137, None, lines, fname) + @serializable() class TwinAPIEndpointView(SyftObject): @@ -191,7 +202,8 @@ def update_state(self, state: dict[Any, Any]) -> None: self.state = state def build_internal_context( - self, context: AuthedServiceContext + self, context: AuthedServiceContext, + admin_client: SyftClient | None, ) -> TwinAPIAuthedContext: helper_function_dict: dict[str, Callable] = {} self.helper_functions = self.helper_functions or {} @@ -220,6 +232,7 @@ def build_internal_context( code=helper_function_set, state=self.state or {}, user=user, + admin_client=admin_client, ) def __call__(self, *args: Any, **kwargs: Any) -> Any: @@ -465,6 +478,11 @@ def exec_private_function( return SyftError(message="You're not allowed to run this code.") + def get_admin_client_from_node(self, context: AuthedServiceContext) -> SyftClient: + admin_client = context.node.get_guest_client() + admin_client.credentials = context.node.signing_key + return admin_client + def exec_code( self, code: PrivateAPIEndpoint | PublicAPIEndpoint, @@ -476,12 +494,15 @@ def exec_code( inner_function = ast.parse(code.api_code).body[0] inner_function.decorator_list = [] # compile the function - raw_byte_code = compile(ast.unparse(inner_function), "", "exec") + src = ast.unparse(inner_function) + raw_byte_code = compile(src, code.func_name, "exec") + register_fn_in_linecache(code.func_name, src) + admin_client = self.get_admin_client_from_node(context) # load it exec(raw_byte_code) # nosec - internal_context = code.build_internal_context(context) + internal_context = code.build_internal_context(context, admin_client=admin_client) # execute it evil_string = f"{code.func_name}(*args, **kwargs,context=internal_context)" diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 244fb162d19..f0eded3a2d4 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -8,6 +8,7 @@ from copy import deepcopy import datetime from enum import Enum +import linecache import hashlib import inspect from io import StringIO @@ -279,7 +280,9 @@ class UserCode(SyncableSyftObject): input_kwargs: list[str] enclave_metadata: EnclaveMetadata | None = None submit_time: DateTime | None = None - uses_domain: bool = False # tracks if the code calls domain.something, variable is set during parsing + uses_domain: bool = ( + False # tracks if the code calls domain.something, variable is set during parsing + ) nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {} worker_pool_name: str | None = None @@ -861,7 +864,9 @@ def _ephemeral_node_call( n_consumers=n_consumers, deploy_to="python", ) - ep_client = ep_node.login(email="info@openmined.org", password="changethis") # nosec + ep_client = ep_node.login( + email="info@openmined.org", password="changethis" + ) # nosec self.input_policy_init_kwargs = cast(dict, self.input_policy_init_kwargs) for node_id, obj_dict in self.input_policy_init_kwargs.items(): # api = APIRegistry.api_for( From 92e35c9fc80028c6d369288c934dc909d60da60f Mon Sep 17 00:00:00 2001 From: snyk-bot Date: Wed, 19 Jun 2024 03:54:25 +0000 Subject: [PATCH 189/313] fix: docs/requirements.txt to reduce vulnerabilities The following vulnerabilities are fixed by pinning transitive dependencies: - 
https://snyk.io/vuln/SNYK-PYTHON-URLLIB3-7267250 --- docs/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/requirements.txt b/docs/requirements.txt index a16817917de..b16716ced72 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -11,3 +11,4 @@ sphinx-autoapi==1.8.4 sphinx-code-include==1.1.1 sphinx-copybutton==0.4.0 sphinx-panels==0.6.0 +urllib3>=2.2.2 # not directly required, pinned by Snyk to avoid a vulnerability From dcd6dee23689ccfce647e3b4a805071e6bd9dc19 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 19 Jun 2024 11:18:37 +0530 Subject: [PATCH 190/313] fix wolfi python package --- packages/grid/backend/backend.dockerfile | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/packages/grid/backend/backend.dockerfile b/packages/grid/backend/backend.dockerfile index 08ea2c9a72a..55d390e2515 100644 --- a/packages/grid/backend/backend.dockerfile +++ b/packages/grid/backend/backend.dockerfile @@ -1,7 +1,12 @@ ARG PYTHON_VERSION="3.12" -ARG UV_VERSION="0.1.41-r0" +ARG UV_VERSION="0.2.13-r0" ARG TORCH_VERSION="2.3.0" +# wolfi-os pkg definition links +# https://github.com/wolfi-dev/os/blob/main/python-3.12.yaml +# https://github.com/wolfi-dev/os/blob/main/py3-pip.yaml +# https://github.com/wolfi-dev/os/blob/main/uv.yaml + # ==================== [BUILD STEP] Python Dev Base ==================== # FROM cgr.dev/chainguard/wolfi-base as syft_deps @@ -12,7 +17,9 @@ ARG TORCH_VERSION # Setup Python DEV RUN apk update && apk upgrade && \ - apk add build-base gcc python-$PYTHON_VERSION-dev-default uv=$UV_VERSION + apk add build-base gcc python-$PYTHON_VERSION-dev uv=$UV_VERSION && \ + # preemptive fix for wolfi-os breaking python entrypoint + (test -f /usr/bin/python || ln -s /usr/bin/python3.12 /usr/bin/python) WORKDIR /root/app @@ -44,7 +51,9 @@ ARG PYTHON_VERSION ARG UV_VERSION RUN apk update && apk upgrade && \ - apk add --no-cache git bash python-$PYTHON_VERSION-default py$PYTHON_VERSION-pip uv=$UV_VERSION + apk add --no-cache git bash python-$PYTHON_VERSION py$PYTHON_VERSION-pip uv=$UV_VERSION && \ + # preemptive fix for wolfi-os breaking python entrypoint + (test -f /usr/bin/python || ln -s /usr/bin/python3.12 /usr/bin/python) WORKDIR /root/app/ From d438cb32ae2b6e4321f29aaa49b30e1299479443 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Wed, 19 Jun 2024 11:30:34 +0530 Subject: [PATCH 191/313] fix worker_cpu --- packages/grid/backend/grid/images/worker_cpu.dockerfile | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile index f1b6207ce90..24077464a58 100644 --- a/packages/grid/backend/grid/images/worker_cpu.dockerfile +++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile @@ -5,13 +5,10 @@ # NOTE: This dockerfile will be built inside a grid-backend container in PROD # Hence COPY will not work the same way in DEV vs. PROD -# FIXME: Due to dependency on grid-backend base, python can only be changed from 3.11 to 3.11-dev -# Later we'd want to uninstall old python, and then install a new python runtime... -# ... but pre-built syft deps may break! 
- ARG SYFT_VERSION_TAG="0.8.7-beta.10" FROM openmined/grid-backend:${SYFT_VERSION_TAG} +# should match base image python version ARG PYTHON_VERSION="3.12" ARG SYSTEM_PACKAGES="" ARG PIP_PACKAGES="pip --dry-run" @@ -22,7 +19,9 @@ ENV SYFT_WORKER="true" \ SYFT_VERSION_TAG=${SYFT_VERSION_TAG} \ UV_HTTP_TIMEOUT=600 -RUN apk update && apk upgrade && \ +# dont run `apk upgrade` here, as it runs upgrades on the base image +# which may break syft or carry over breaking changes by wolfi-os +RUN apk update && \ apk add --no-cache ${SYSTEM_PACKAGES} && \ # if uv is present then run uv pip install else simple pip install if [ -x "$(command -v uv)" ]; then uv pip install --no-cache ${PIP_PACKAGES}; else pip install --user ${PIP_PACKAGES}; fi && \ From 1196f0516571b788107feba97e138acfbd595849 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Wed, 19 Jun 2024 16:26:05 +1000 Subject: [PATCH 192/313] Added notebook for auto approve rate limiter - Ported some functionality over from beach --- notebooks/scenarios/bigquery/.gitignore | 1 + .../bigquery/05-autoapprove-ratelimiter.ipynb | 553 ++++++++++++++++++ packages/syft/src/syft/client/client.py | 16 + packages/syft/src/syft/service/api/api.py | 16 +- .../syft/src/syft/service/api/api_service.py | 90 ++- .../syft/src/syft/service/code/user_code.py | 9 +- .../src/syft/service/user/user_service.py | 15 +- tox.ini | 8 +- 8 files changed, 688 insertions(+), 20 deletions(-) create mode 100644 notebooks/scenarios/bigquery/.gitignore create mode 100644 notebooks/scenarios/bigquery/05-autoapprove-ratelimiter.ipynb diff --git a/notebooks/scenarios/bigquery/.gitignore b/notebooks/scenarios/bigquery/.gitignore new file mode 100644 index 00000000000..63b990492d4 --- /dev/null +++ b/notebooks/scenarios/bigquery/.gitignore @@ -0,0 +1 @@ +service_account.json \ No newline at end of file diff --git a/notebooks/scenarios/bigquery/05-autoapprove-ratelimiter.ipynb b/notebooks/scenarios/bigquery/05-autoapprove-ratelimiter.ipynb new file mode 100644 index 00000000000..b6510e69cb1 --- /dev/null +++ b/notebooks/scenarios/bigquery/05-autoapprove-ratelimiter.ipynb @@ -0,0 +1,553 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "d23b4d75-94b3-4dc8-8741-c3fed91747c5", + "metadata": {}, + "outputs": [], + "source": [ + "project_id = \"\" # add your bigquery project_id\n", + "database = \"test_1gb\"\n", + "table = \"accounts\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f72e70bd-1aa4-45bc-b142-5a8c36756eb5", + "metadata": {}, + "outputs": [], + "source": [ + "query = f\"SELECT * FROM {project_id}.{database}.{table} limit 10\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5118c9cc-e3fc-4768-a646-5c507f39c733", + "metadata": {}, + "outputs": [], + "source": [ + "# !uv pip install db-dtypes\n", + "# !uv pip install google-cloud-bigquery==3.24.0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "57c97872-0ebd-4ece-8c3e-88d0a27bfce8", + "metadata": {}, + "outputs": [], + "source": [ + "SYFT_VERSION = \">=0.8.7.b0,<0.9\"\n", + "package_string = f'\"syft{SYFT_VERSION}\"'\n", + "# %pip install {package_string} -q" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5313ef79-c7a0-4f5c-9a61-f2029d060c04", + "metadata": {}, + "outputs": [], + "source": [ + "# syft absolute\n", + "import syft as sy" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0d736170-3d96-47ca-921c-c97b6f3eca60", + "metadata": {}, + "outputs": [], + "source": [ + "node_name 
= \"bigquery-high-side\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9ec3c500-0ec1-4701-ae8a-b9509638b9ae", + "metadata": {}, + "outputs": [], + "source": [ + "node = sy.orchestra.launch(\n", + " name=node_name,\n", + " dev_mode=True,\n", + " create_producer=True,\n", + " n_consumers=1,\n", + " reset=True,\n", + " port=\"auto\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2ba848b8-cbda-4686-8ecd-392702fe504f", + "metadata": {}, + "outputs": [], + "source": [ + "high_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6b4f7f3d-554c-4767-87c1-9650103f6014", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import json\n", + "\n", + "SERVICE_ACCOUNT = json.loads(open(\"service_account.json\").read())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1701adfd-c570-4ebd-b75e-853fb3f665af", + "metadata": {}, + "outputs": [], + "source": [ + "@sy.api_endpoint_method(\n", + " settings={\"SERVICE_ACCOUNT\": SERVICE_ACCOUNT},\n", + ")\n", + "def query_endpoint(context, sql: str) -> str:\n", + " # syft absolute\n", + " import syft as sy\n", + "\n", + " result = None\n", + " try:\n", + " # build up the state object so we can track a users sql submissions\n", + " # stdlib\n", + " from datetime import datetime\n", + "\n", + " query_time = datetime.now()\n", + "\n", + " user_email = context.user.email\n", + " submissions = context.state.get(\"submissions\", {})\n", + " if user_email not in submissions:\n", + " submissions[user_email] = {\"queries\": {}, \"results\": {}}\n", + "\n", + " query_str = sql.strip()\n", + " # exit early\n", + " if query_str in submissions[user_email][\"queries\"]:\n", + " # they have already submitted this query so just return the existing uid\n", + " result_ptr_id = submissions[user_email][\"queries\"][query_str]\n", + " return (\n", + " \"Query submitted for approval. \"\n", + " + f'Call client.bigquery.get_result(uid=\"{result_ptr_id}\") '\n", + " + 'to get your result.\"'\n", + " )\n", + "\n", + " # do the query\n", + " # third party\n", + " from google.cloud import bigquery\n", + " from google.oauth2 import service_account\n", + "\n", + " credentials = service_account.Credentials.from_service_account_info(\n", + " context.settings[\"SERVICE_ACCOUNT\"]\n", + " )\n", + " scoped_credentials = credentials.with_scopes(\n", + " [\"https://www.googleapis.com/auth/bigquery\"]\n", + " )\n", + "\n", + " client = bigquery.Client(\n", + " credentials=scoped_credentials,\n", + " location=\"us-west1\",\n", + " )\n", + "\n", + " query_job = client.query(sql)\n", + " # convert to pandas dataframe\n", + " result = query_job.result().to_dataframe()\n", + " except Exception:\n", + " result = sy.SyftError(\n", + " message=f\"There was an error running {sql}. 
Please contact an admin.\"\n", + " )\n", + "\n", + " try:\n", + " # upload to blob storage\n", + " result_obj = sy.ActionObject.from_obj(result)\n", + "\n", + " def to_blobstorage(action_obj, client):\n", + " action_obj.syft_node_location = client.id\n", + " action_obj.syft_client_verify_key = client.verify_key\n", + " action_obj._save_to_blob_storage()\n", + " action_obj.syft_action_data_cache = action_obj.as_empty_data()\n", + " action_obj.syft_action_data_repr_ = str(action_obj.syft_action_data_type)\n", + " action_obj.syft_action_data_str_ = str(action_obj.syft_action_data_type)\n", + " action_obj_ptr = client.api.services.action.set(\n", + " action_obj, add_storage_permission=True\n", + " )\n", + " return action_obj_ptr\n", + "\n", + " result_ptr = to_blobstorage(result_obj, context.admin_client)\n", + "\n", + " # store time and uid of result in state and return to user\n", + " result_ptr_str = str(result_ptr.id).lower()\n", + " submissions[user_email][\"queries\"][query_str] = (\n", + " result_ptr_str # for this function\n", + " )\n", + " submissions[user_email][\"results\"][result_ptr_str] = (\n", + " query_time # for the results function\n", + " )\n", + " context.state[\"submissions\"] = submissions\n", + " return (\n", + " \"Query submitted for approval. \"\n", + " + f'Call client.bigquery.get_result(uid=\"{str(result_ptr.id)}\") '\n", + " + 'to get your result.\"'\n", + " )\n", + " except Exception:\n", + " # unable to generate the action object\n", + " return sy.SyftError(\n", + " message=f\"There was an error running {sql}. Please contact an admin.\"\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "69defe0e-cfca-4f41-a62c-1df1a2268e4d", + "metadata": {}, + "outputs": [], + "source": [ + "query_endpoint.view_access" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4ffd4098-8b0c-4123-b526-e8010306e622", + "metadata": {}, + "outputs": [], + "source": [ + "query_endpoint.view_access = False\n", + "query_endpoint.view_access" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f339e26b-5186-434e-bdf5-002af0c44f48", + "metadata": {}, + "outputs": [], + "source": [ + "new_endpoint = sy.TwinAPIEndpoint(\n", + " path=\"bigquery.query\",\n", + " mock_function=query_endpoint,\n", + " private_function=query_endpoint,\n", + " endpoint_timeout=60,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38c1858e-7262-4665-9366-40a6c4dab142", + "metadata": {}, + "outputs": [], + "source": [ + "high_client.api.services.api.delete(endpoint_path=\"bigquery.query\")\n", + "response = high_client.api.services.api.add(endpoint=new_endpoint)\n", + "response" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d01e4b1f-339a-4067-9320-5e530bf7e4ed", + "metadata": {}, + "outputs": [], + "source": [ + "@sy.api_endpoint(\n", + " path=\"bigquery.get_result\", settings={\"approve_seconds\": 60}, endpoint_timeout=60\n", + ")\n", + "def get_result(context, uid: str) -> str:\n", + " # syft absolute\n", + " import syft as sy\n", + "\n", + " try:\n", + " # check of the user and uid are in the state\n", + " # stdlib\n", + " from datetime import datetime\n", + " from datetime import timedelta\n", + "\n", + " user_email = context.user.email\n", + "\n", + " # get the submissions from the other endpoint state\n", + " endpoint = context.admin_client.api.services.api.get(api_path=\"bigquery.query\")\n", + " submissions = {}\n", + " if hasattr(endpoint, \"mock_function\"):\n", + " submissions_obj = 
endpoint.mock_function.state\n", + " submissions = submissions_obj.get(\"submissions\", {})\n", + "\n", + " if user_email not in submissions:\n", + " submissions[user_email] = {\"queries\": {}, \"results\": {}}\n", + "\n", + " uid_str = uid.strip().lower()\n", + " if uid_str not in submissions[user_email][\"results\"]:\n", + " # no uid for this user\n", + " return f'There is no result matching {uid}. Please contact an admin.\"'\n", + " request_time = submissions[user_email][\"results\"][uid_str]\n", + " seconds = int(context.settings[\"approve_seconds\"])\n", + "\n", + " delta = timedelta(seconds=seconds)\n", + " result_ready = request_time + delta\n", + " if datetime.now() < result_ready:\n", + " # query not ready\n", + " return sy.SyftNotReady(\n", + " message=f\"The request to run the query for {uid} is pending. Try again later.\"\n", + " )\n", + " # fall through\n", + " except Exception:\n", + " return sy.SyftError(\n", + " message=f\"There was an error fetching {uid}. Please contact an admin.\"\n", + " )\n", + "\n", + " try:\n", + " # go get the data and return it\n", + " id = sy.UID(uid)\n", + " action_obj = context.admin_client.api.services.action.get(uid=id)\n", + " return action_obj\n", + " except Exception:\n", + " return sy.SyftError(\n", + " message=f\"There was an error fetching {uid}. Please contact an admin.\"\n", + " )\n", + "\n", + "\n", + "high_client.api.services.api.delete(endpoint_path=\"bigquery.get_result\")\n", + "response = high_client.api.services.api.add(endpoint=get_result)\n", + "response" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "030fd15a-edc7-41e4-b59f-463e8be598bc", + "metadata": {}, + "outputs": [], + "source": [ + "high_client.api.services.api" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "16c9f2f0-a988-416d-b52a-628cf27f8bff", + "metadata": {}, + "outputs": [], + "source": [ + "high_client.register(\n", + " name=\"Jane Doe\",\n", + " email=\"jane@caltech.edu\",\n", + " password=\"abc123\",\n", + " password_verify=\"abc123\",\n", + " institution=\"Caltech\",\n", + " website=\"https://www.caltech.edu/\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7c20a33e-5784-4f10-a08d-22293bc939bc", + "metadata": {}, + "outputs": [], + "source": [ + "jane_client = high_client.login_as(email=\"jane@caltech.edu\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9f752816-8347-4ca0-9dbe-1e336b593eca", + "metadata": {}, + "outputs": [], + "source": [ + "res = jane_client.api.bigquery.query(sql=query)\n", + "res" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0c66df31-6a98-442e-847f-b36e304d050f", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "import re\n", + "\n", + "\n", + "def extract_uid(input_string):\n", + " match = re.search(r'uid=\"([^\"]+)\"', input_string)\n", + " if match:\n", + " return match.group(1)\n", + " else:\n", + " return None\n", + "\n", + "\n", + "uid = extract_uid(str(res))\n", + "uid" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a36ecb92-2570-4151-a2dc-404d24dbabee", + "metadata": {}, + "outputs": [], + "source": [ + "res = jane_client.api.bigquery.get_result(uid=uid)\n", + "res" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3f0f4f5e-40f2-47af-8ec2-a08932beaf94", + "metadata": {}, + "outputs": [], + "source": [ + "res = jane_client.api.bigquery.get_result(uid=uid)\n", + "res" + ] + }, + { + "cell_type": "code", + "execution_count": null, + 
"id": "c4a919a9-3c6a-4d84-9d63-d95888c2b920", + "metadata": {}, + "outputs": [], + "source": [ + "@sy.api_endpoint_method()\n", + "def set_endpoint_state(context, endpoint_path: str, state: dict):\n", + " # syft absolute\n", + " import syft as sy\n", + "\n", + " if hasattr(state, \"syft_action_data\"):\n", + " state = state.syft_action_data\n", + " if not isinstance(state, dict):\n", + " return sy.SyftError(message=f\"state is {type(state)}, must be dict\")\n", + " result = context.admin_client.api.services.api.set_state(\n", + " api_path=endpoint_path, state=state, both=True\n", + " )\n", + " return result\n", + "\n", + "\n", + "@sy.api_endpoint_method()\n", + "def empty_mock(context, endpoint_path: str, state: dict):\n", + " return \"not allowed\"\n", + "\n", + "\n", + "new_endpoint = sy.TwinAPIEndpoint(\n", + " path=\"state.set\",\n", + " mock_function=empty_mock,\n", + " private_function=set_endpoint_state,\n", + ")\n", + "\n", + "high_client.api.services.api.delete(endpoint_path=\"state.set\")\n", + "high_client.api.services.api.add(endpoint=new_endpoint)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d2d191f8-7c64-4a01-9e40-be77952321d1", + "metadata": {}, + "outputs": [], + "source": [ + "# get the state\n", + "current_state = high_client.api.services.api.get(\n", + " api_path=\"bigquery.query\"\n", + ").mock_function.state\n", + "current_state" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cadc7de4-5e2b-437c-9f5b-b944638e45a3", + "metadata": {}, + "outputs": [], + "source": [ + "# reset the state\n", + "high_client.api.services.state.set(endpoint_path=\"bigquery.query\", state={})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de988b2d-0d3d-4eec-b981-396698f8acb6", + "metadata": {}, + "outputs": [], + "source": [ + "high_client.api.services.api.get(api_path=\"bigquery.query\").mock_function.state" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2e91fa45-bc2f-4312-83ac-e308e41ba58d", + "metadata": {}, + "outputs": [], + "source": [ + "current_state" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0c227eef-7b7e-454f-9285-0cd69309e871", + "metadata": {}, + "outputs": [], + "source": [ + "# restore it\n", + "high_client.api.services.state.set(endpoint_path=\"bigquery.query\", state=current_state)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "284f0ee0-801c-4af4-b8bf-eb99246fb0d0", + "metadata": {}, + "outputs": [], + "source": [ + "high_client.api.services.api.get(api_path=\"bigquery.query\").mock_function.state" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1b84e0ef-b7ac-43c1-8765-3e02888156c9", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index f0d9c55b340..35784e6455a 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -777,6 +777,22 @@ def login_as_guest(self) -> Self: return _guest_client + def login_as(self, email: str) -> Self: + 
user_private_key = self.api.services.user.key_for_email(email=email)
+ if not isinstance(user_private_key, UserPrivateKey):
+ return user_private_key
+ if self.metadata is not None:
+ print(
+ f"Logged into <{self.name}: {self.metadata.node_side_type.capitalize()}-side "
+ f"{self.metadata.node_type.capitalize()}> as {email}"
+ )
+
+ return self.__class__(
+ connection=self.connection,
+ credentials=user_private_key.signing_key,
+ metadata=self.metadata,
+ )
+
 def login(
 self,
 email: str | None = None,
name="set_state", + roles=ADMIN_ROLE_LEVEL, + ) + def set_state( + self, + context: AuthedServiceContext, + api_path: str, + state: dict, + private: bool = False, + mock: bool = False, + both: bool = False, + ) -> TwinAPIEndpoint | SyftError: + """Sets the state of a specific API endpoint.""" + if both: + private = True + mock = True + result = self.stash.get_by_path(context.node.verify_key, api_path) + if result.is_err(): + return SyftError(message=result.err()) + api_endpoint = result.ok() + + if private and api_endpoint.private_function: + api_endpoint.private_function.state = state + if mock and api_endpoint.mock_function: + api_endpoint.mock_function.state = state + + result = self.stash.upsert(context.credentials, endpoint=api_endpoint) + if result.is_err(): + return SyftError(message=result.err()) + return SyftSuccess(message=f"APIEndpoint {api_path} state updated.") + + @service_method( + path="api.set_settings", + name="set_settings", + roles=ADMIN_ROLE_LEVEL, + ) + def set_settings( + self, + context: AuthedServiceContext, + api_path: str, + settings: dict, + private: bool = False, + mock: bool = False, + both: bool = False, + ) -> TwinAPIEndpoint | SyftError: + """Sets the settings of a specific API endpoint.""" + if both: + private = True + mock = True + result = self.stash.get_by_path(context.node.verify_key, api_path) + if result.is_err(): + return SyftError(message=result.err()) + api_endpoint = result.ok() + + if private and api_endpoint.private_function: + api_endpoint.private_function.settings = settings + if mock and api_endpoint.mock_function: + api_endpoint.mock_function.settings = settings + + result = self.stash.upsert(context.credentials, endpoint=api_endpoint) + if result.is_err(): + return SyftError(message=result.err()) + return SyftSuccess(message=f"APIEndpoint {api_path} settings updated.") + @service_method( path="api.api_endpoints", name="api_endpoints", @@ -359,7 +441,7 @@ def get_public_context( if isinstance(custom_endpoint, SyftError): return custom_endpoint - return custom_endpoint.mock_function.build_internal_context(context).to( + return custom_endpoint.mock_function.build_internal_context(context=context).to( TwinAPIContextView ) @@ -383,9 +465,9 @@ def get_private_context( PrivateAPIEndpoint, custom_endpoint.private_function ) - return custom_endpoint.private_function.build_internal_context(context).to( - TwinAPIContextView - ) + return custom_endpoint.private_function.build_internal_context( + context=context + ).to(TwinAPIContextView) @service_method(path="api.get_all", name="get_all", roles=ADMIN_ROLE_LEVEL) def get_all( diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index f0eded3a2d4..244fb162d19 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -8,7 +8,6 @@ from copy import deepcopy import datetime from enum import Enum -import linecache import hashlib import inspect from io import StringIO @@ -280,9 +279,7 @@ class UserCode(SyncableSyftObject): input_kwargs: list[str] enclave_metadata: EnclaveMetadata | None = None submit_time: DateTime | None = None - uses_domain: bool = ( - False # tracks if the code calls domain.something, variable is set during parsing - ) + uses_domain: bool = False # tracks if the code calls domain.something, variable is set during parsing nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {} worker_pool_name: str | None = None @@ -864,9 +861,7 @@ def _ephemeral_node_call( 
n_consumers=n_consumers, deploy_to="python", ) - ep_client = ep_node.login( - email="info@openmined.org", password="changethis" - ) # nosec + ep_client = ep_node.login(email="info@openmined.org", password="changethis") # nosec self.input_policy_init_kwargs = cast(dict, self.input_policy_init_kwargs) for node_id, obj_dict in self.input_policy_init_kwargs.items(): # api = APIRegistry.api_for( diff --git a/packages/syft/src/syft/service/user/user_service.py b/packages/syft/src/syft/service/user/user_service.py index 63425f90103..7bd6b7e68af 100644 --- a/packages/syft/src/syft/service/user/user_service.py +++ b/packages/syft/src/syft/service/user/user_service.py @@ -5,7 +5,6 @@ from ...exceptions.user import UserAlreadyExistsException from ...node.credentials import SyftSigningKey from ...node.credentials import SyftVerifyKey -from ...node.credentials import UserLoginCredentials from ...serde.serializable import serializable from ...store.document_store import DocumentStore from ...store.linked_obj import LinkedObject @@ -361,6 +360,18 @@ def get_target_object( else: return user + @service_method( + path="user.key_for_email", name="key_for_email", roles=DATA_OWNER_ROLE_LEVEL + ) + def key_for_email( + self, context: AuthedServiceContext, email: str + ) -> UserPrivateKey | SyftError: + result = self.stash.get_by_email(credentials=context.credentials, email=email) + if result.is_ok(): + user = result.ok() + return user.to(UserPrivateKey) + return SyftError(message=str(result.err())) + @service_method(path="user.delete", name="delete", roles=GUEST_ROLE_LEVEL) def delete(self, context: AuthedServiceContext, uid: UID) -> bool | SyftError: # third party @@ -395,7 +406,7 @@ def delete(self, context: AuthedServiceContext, uid: UID) -> bool | SyftError: def exchange_credentials( self, context: UnauthedServiceContext - ) -> UserLoginCredentials | SyftError: + ) -> UserPrivateKey | SyftError: """Verify user TODO: We might want to use a SyftObject instead """ diff --git a/tox.ini b/tox.ini index 3f8d1601474..d15fd50c6da 100644 --- a/tox.ini +++ b/tox.ini @@ -207,8 +207,14 @@ deps = {[testenv:syft]deps} jupyter jupyterlab +allowlist_externals = + bash commands = - jupyter lab --ip 0.0.0.0 --ServerApp.token={posargs} + bash -c 'if [ -z "{posargs}" ]; then \ + jupyter lab --ip 0.0.0.0; \ + else \ + jupyter lab --ip 0.0.0.0 --ServerApp.token={posargs}; \ + fi' [testenv:syft.protocol.check] description = Syft Protocol Check From dbefae518c087e155b78b1f20604b4554f4436b7 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Wed, 19 Jun 2024 12:30:09 +0300 Subject: [PATCH 193/313] expand user code test --- .../syft/src/syft/service/code/user_code.py | 3 +- .../syft/tests/syft/users/user_code_test.py | 38 +++++++++++++++---- 2 files changed, 33 insertions(+), 8 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 57943fd8b81..e1ba6beabfe 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -33,8 +33,9 @@ # relative from ...abstract_node import NodeSideType from ...abstract_node import NodeType -from ...client.api import APIRegistry, generate_remote_function +from ...client.api import APIRegistry from ...client.api import NodeIdentity +from ...client.api import generate_remote_function from ...client.enclave_client import EnclaveMetadata from ...node.credentials import SyftVerifyKey from ...serde.deserialize import _deserialize diff --git 
a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py
index f006525097e..882f37e4496 100644
--- a/packages/syft/tests/syft/users/user_code_test.py
+++ b/packages/syft/tests/syft/users/user_code_test.py
@@ -74,23 +74,47 @@ def test_user_code(worker) -> None:
 assert multi_call_res.get() == result.get()
-def test_duplicated_user_code(worker, guest_client: User) -> None:
+def test_duplicated_user_code(worker) -> None:
+ worker.root_client.register(
+ name="Jane Doe",
+ email="jane@caltech.edu",
+ password="abc123",
+ password_verify="abc123",
+ institution="Caltech",
+ website="https://www.caltech.edu/",
+ )
+ ds_client = worker.root_client.login(
+ email="jane@caltech.edu",
+ password="abc123",
+ )
+
 # mock_syft_func()
- result = guest_client.api.services.code.request_code_execution(mock_syft_func)
+ result = ds_client.api.services.code.request_code_execution(mock_syft_func)
 assert isinstance(result, Request)
- assert len(guest_client.code.get_all()) == 1
+ assert len(ds_client.code.get_all()) == 1
 # request the exact same code should return an error
- result = guest_client.api.services.code.request_code_execution(mock_syft_func)
+ result = ds_client.api.services.code.request_code_execution(mock_syft_func)
 assert isinstance(result, SyftError)
- assert len(guest_client.code.get_all()) == 1
+ assert len(ds_client.code.get_all()) == 1
 # request a different function name but same content will also succeed
 # flaky if not blocking
 mock_syft_func_2(syft_no_node=True)
- result = guest_client.api.services.code.request_code_execution(mock_syft_func_2)
+ result = ds_client.api.services.code.request_code_execution(mock_syft_func_2)
 assert isinstance(result, Request)
- assert len(guest_client.code.get_all()) == 2
+ assert len(ds_client.code.get_all()) == 2
+
+ code_history = ds_client.code_history
+ assert code_history.code_versions, "No code version found."
+
+ code_histories = worker.root_client.code_histories
+ user_code_history = code_histories[ds_client.logged_in_user]
+ assert not isinstance(user_code_history, SyftError)
+ assert user_code_history.code_versions, "No code version found."
+
+ result = user_code_history.mock_syft_func_2[0]()
+ assert result.get() == 1
 def random_hash() -> str:
From 12c0bbe81d36db952da362156e8ad498b3c8fd73 Mon Sep 17 00:00:00 2001
From: teo
Date: Wed, 19 Jun 2024 15:00:14 +0300
Subject: [PATCH 194/313] added status message to err

---
 .../syft/src/syft/service/code/user_code_service.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index 9287e49fec4..9302dd37404 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -519,10 +519,9 @@ def _call(
 if not can_execute:
 # We check output policy only in l2 deployment.
 # code is from low side (L0 setup)
- if not code.is_output_policy_approved(context):
- return Err(
- "Execution denied: Your code is waiting for approval"
- )
+ status = code.get_status(context)
+ if not status.approved:
+ return Err(status.get_status_message().message)
 is_valid = (
 output_policy._is_valid(context) if output_policy else False
 )
From 6d735b40f4593bca06ff8a35ce60aa6906745e6b Mon Sep 17 00:00:00 2001
From: Aziz Berkay Yesilyurt
Date: Wed, 19 Jun 2024 15:04:24 +0300
Subject: [PATCH 195/313] remove self and context from function signature

---
 packages/syft/src/syft/service/code/user_code.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py
index e1ba6beabfe..06319d4828c 100644
--- a/packages/syft/src/syft/service/code/user_code.py
+++ b/packages/syft/src/syft/service/code/user_code.py
@@ -41,6 +41,8 @@
 from ...serde.deserialize import _deserialize
 from ...serde.serializable import serializable
 from ...serde.serialize import _serialize
+from ...serde.signature import signature_remove_context
+from ...serde.signature import signature_remove_self
 from ...store.document_store import PartitionKey
 from ...store.linked_obj import LinkedObject
 from ...types.datetime import DateTime
@@ -884,6 +886,10 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any:
 api = self._get_api()
 if isinstance(api, SyftError):
 return api
+
+ signature = self.signature
+ signature = signature_remove_self(signature)
+ signature = signature_remove_context(signature)
 remote_user_function = generate_remote_function(
 api=api,
 node_uid=self.node_uid,
From 9e887b5eaf78d15eebf8c80aedcba169f001fa71 Mon Sep 17 00:00:00 2001
From: Thiago Costa Porto
Date: Wed, 19 Jun 2024 14:08:13 +0200
Subject: [PATCH 196/313] feat: add nh3

---
 packages/syft/setup.cfg | 1 +
 1 file changed, 1 insertion(+)

diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg
index 59bfee973ea..855086084b8 100644
--- a/packages/syft/setup.cfg
+++ b/packages/syft/setup.cfg
@@ -65,6 +65,7 @@ syft =
 rich==13.7.1
 jinja2==3.1.4
 tenacity==8.3.0
+ nh3==0.2.17
 install_requires =
 %(syft)s
From 2c73f0e8082853a760e72e4d32b8004326e82e4b Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Wed, 19 Jun 2024 14:46:00 +0200
Subject: [PATCH 197/313] demo fixes

---
 packages/syft/src/syft/node/node.py | 2 +-
 packages/syft/src/syft/service/policy/policy.py | 4 ++++
 .../syft/src/syft/service/request/request_service.py | 11 +++++++++++
 3 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py
index 31ead514b2b..dc7f5881dde 100644
--- a/packages/syft/src/syft/node/node.py
+++ b/packages/syft/src/syft/node/node.py
@@ -1446,8 +1446,8 @@ def add_queueitem_to_queue(
 )
 # 🟡 TODO 36: Needs distributed lock
- self.queue_stash.set_placeholder(credentials, queue_item)
 self.job_stash.set(credentials, job)
+ self.queue_stash.set_placeholder(credentials, queue_item)
 log_service = self.get_service("logservice")
diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py
index 736bf85407c..920721323bc 100644
--- a/packages/syft/src/syft/service/policy/policy.py
+++ b/packages/syft/src/syft/service/policy/policy.py
@@ -68,6 +68,10 @@ def extract_uid(v: Any) -> UID:
 def filter_only_uids(results: Any) -> list[UID] | dict[str, UID] | UID:
+ # Prevent checking for __len__ on ActionObject (creates an Action)
+ if isinstance(results, ActionObject):
+ return extract_uid(results)
+
 if not hasattr(results, "__len__"):
 results = [results]
diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py
index b48f75f5149..1bd76c4cc24 100644
--- a/packages/syft/src/syft/service/request/request_service.py
+++ b/packages/syft/src/syft/service/request/request_service.py
@@ -104,6 +104,17 @@ def submit(
 print("Failed to submit Request", e)
 raise e
+ @service_method(
+ path="request.get_by_uid", name="get_by_uid", roles=DATA_SCIENTIST_ROLE_LEVEL
+ )
+ def get_by_uid(
+ self, context: AuthedServiceContext, uid: UID
+ ) -> Request | None | SyftError:
+ result = self.stash.get_by_uid(context.credentials, uid)
+ if result.is_err():
+ return SyftError(message=str(result.err()))
+ return result.ok()
+
 @service_method(
 path="request.get_all", name="get_all", roles=DATA_SCIENTIST_ROLE_LEVEL
 )
From 11b71d4fc564f0ca4cb8851251a2409860922786 Mon Sep 17 00:00:00 2001
From: Thiago Costa Porto
Date: Wed, 19 Jun 2024 17:31:04 +0200
Subject: [PATCH 198/313] fix: remove deprecated warning

---
 packages/syft/src/syft/__init__.py | 15 ---------------
 1 file changed, 15 deletions(-)

diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py
index d7183898935..e442a5302d9 100644
--- a/packages/syft/src/syft/__init__.py
+++ b/packages/syft/src/syft/__init__.py
@@ -94,21 +94,6 @@
 logger.start()
-try:
- # third party
- from IPython import get_ipython
-
- get_ipython() # noqa: F821
- # TODO: add back later or auto detect
- # display(
- # Markdown(
- # "\nWarning: syft is imported in light mode by default. \
- # \nTo switch to dark mode, please run `sy.options.color_theme = 'dark'`"
- # )
- # )
-except: # noqa: E722
- pass # nosec
 def _patch_ipython_autocompletion() -> None:
 try:
From 75f1e483d3073cbabddb4079457551541d83b699 Mon Sep 17 00:00:00 2001
From: Yash Gorana
Date: Wed, 19 Jun 2024 21:27:45 +0530
Subject: [PATCH 199/313] fix logging

---
 packages/grid/backend/grid/logger/__init__.py | 0
 packages/grid/backend/grid/logger/config.py | 59 -----
 packages/grid/backend/grid/logger/handler.py | 108 ---------
 packages/grid/backend/grid/logging.yaml | 46 ++++
 packages/grid/backend/grid/main.py | 22 +-
 packages/grid/backend/grid/start.sh | 2 +-
 packages/syft/setup.cfg | 1 -
 packages/syft/src/syft/__init__.py | 142 ++++++------
 packages/syft/src/syft/client/api.py | 4 +-
 packages/syft/src/syft/client/client.py | 6 +-
 .../syft/src/syft/client/domain_client.py | 10 +-
 packages/syft/src/syft/client/registry.py | 17 +-
 packages/syft/src/syft/client/syncing.py | 9 +-
 packages/syft/src/syft/node/node.py | 54 ++---
 packages/syft/src/syft/node/routes.py | 14 +-
 packages/syft/src/syft/node/server.py | 10 +-
 .../src/syft/service/action/action_object.py | 68 +++---
 .../src/syft/service/action/action_types.py | 10 +-
 .../syft/service/network/network_service.py | 4 +-
 .../src/syft/service/network/node_peer.py | 11 +-
 .../syft/src/syft/service/network/utils.py | 12 +-
 .../syft/service/notifier/notifier_service.py | 20 +-
 packages/syft/src/syft/service/queue/queue.py | 7 +-
 .../syft/src/syft/service/queue/zmq_queue.py | 209 +++++++++---------
 .../syft/src/syft/service/request/request.py | 9 +-
 packages/syft/src/syft/service/service.py | 5 +-
 .../src/syft/service/settings/settings.py | 7 +-
 .../syft/src/syft/service/sync/diff_state.py | 11 +-
 .../src/syft/service/sync/sync_service.py | 4 +-
.../syft/src/syft/service/worker/utils.py | 20 +- .../service/worker/worker_pool_service.py | 5 +- .../src/syft/store/blob_storage/__init__.py | 7 +- .../src/syft/store/blob_storage/seaweedfs.py | 5 +- .../syft/src/syft/store/document_store.py | 1 - packages/syft/src/syft/store/locks.py | 4 +- .../src/syft/store/sqlite_document_store.py | 5 +- packages/syft/src/syft/types/grid_url.py | 5 +- packages/syft/src/syft/types/syft_object.py | 8 +- packages/syft/src/syft/types/uid.py | 10 +- packages/syft/src/syft/util/logger.py | 134 ----------- .../components/tabulator_template.py | 6 +- packages/syft/src/syft/util/table.py | 6 +- packages/syft/src/syft/util/telemetry.py | 8 +- packages/syft/src/syft/util/util.py | 35 ++- ruff.toml | 1 + 45 files changed, 464 insertions(+), 677 deletions(-) delete mode 100644 packages/grid/backend/grid/logger/__init__.py delete mode 100644 packages/grid/backend/grid/logger/config.py delete mode 100644 packages/grid/backend/grid/logger/handler.py create mode 100644 packages/grid/backend/grid/logging.yaml delete mode 100644 packages/syft/src/syft/util/logger.py diff --git a/packages/grid/backend/grid/logger/__init__.py b/packages/grid/backend/grid/logger/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/packages/grid/backend/grid/logger/config.py b/packages/grid/backend/grid/logger/config.py deleted file mode 100644 index 000a9c9c713..00000000000 --- a/packages/grid/backend/grid/logger/config.py +++ /dev/null @@ -1,59 +0,0 @@ -"""This file defines the configuration for `loguru` which is used as the python logging client. -For more information refer to `loguru` documentation: https://loguru.readthedocs.io/en/stable/overview.html -""" - -# stdlib -from datetime import time -from datetime import timedelta -from enum import Enum -from functools import lru_cache - -# third party -from pydantic_settings import BaseSettings - - -# LOGURU_LEVEL type for version>3.8 -class LogLevel(Enum): - """Types of logging levels.""" - - TRACE = "TRACE" - DEBUG = "DEBUG" - INFO = "INFO" - SUCCESS = "SUCCESS" - WARNING = "WARNING" - ERROR = "ERROR" - CRITICAL = "CRITICAL" - - -class LogConfig(BaseSettings): - """Configuration for the logging client.""" - - # Logging format - LOGURU_FORMAT: str = ( - "{time:YYYY-MM-DD HH:mm:ss} | " - "{level: <8} | " - "{name}:{function}:{line}: " - "{message}" - ) - - LOGURU_LEVEL: str = LogLevel.INFO.value - LOGURU_SINK: str | None = "/var/log/pygrid/grid.log" - LOGURU_COMPRESSION: str | None = None - LOGURU_ROTATION: str | int | time | timedelta | None = None - LOGURU_RETENTION: str | int | timedelta | None = None - LOGURU_COLORIZE: bool | None = True - LOGURU_SERIALIZE: bool | None = False - LOGURU_BACKTRACE: bool | None = True - LOGURU_DIAGNOSE: bool | None = False - LOGURU_ENQUEUE: bool | None = True - LOGURU_AUTOINIT: bool | None = False - - -@lru_cache -def get_log_config() -> LogConfig: - """Returns the configuration for the logging client. - - Returns: - LogConfig: configuration for the logging client. 
- """ - return LogConfig() diff --git a/packages/grid/backend/grid/logger/handler.py b/packages/grid/backend/grid/logger/handler.py deleted file mode 100644 index 7f198bbcece..00000000000 --- a/packages/grid/backend/grid/logger/handler.py +++ /dev/null @@ -1,108 +0,0 @@ -# future -from __future__ import annotations - -# stdlib -from functools import lru_cache -import logging -from pprint import pformat -import sys - -# third party -import loguru -from loguru import logger - -# relative -from .config import get_log_config - - -class LogHandler: - def __init__(self) -> None: - self.config = get_log_config() - - def format_record(self, record: loguru.Record) -> str: - """ - Custom loguru log message format for handling JSON (in record['extra']) - """ - format_string: str = self.config.LOGURU_FORMAT - - if record["extra"] is not None: - for key in record["extra"].keys(): - record["extra"][key] = pformat( - record["extra"][key], indent=2, compact=False, width=88 - ) - format_string += "\n{extra[" + key + "]}" - - format_string += "{exception}\n" - - return format_string - - def init_logger(self) -> None: - """ - Redirects all registered std logging handlers to a loguru sink. - Call init_logger() on fastapi startup. - """ - intercept_handler = InterceptHandler() - - # Generalizes log level for all root loggers, including third party - logging.root.setLevel(self.config.LOGURU_LEVEL) - logging.root.handlers = [intercept_handler] - - for log in logging.root.manager.loggerDict.keys(): - log_instance = logging.getLogger(log) - log_instance.handlers = [] - log_instance.propagate = True - - logger.configure( - handlers=[ - { - "sink": sys.stdout, - "level": self.config.LOGURU_LEVEL, - "serialize": self.config.LOGURU_SERIALIZE, - "format": self.format_record, - } - ], - ) - - try: - if ( - self.config.LOGURU_SINK is not ("sys.stdout" or "sys.stderr") - and self.config.LOGURU_SINK is not None - ): - logger.add( - self.config.LOGURU_SINK, - retention=self.config.LOGURU_RETENTION, - rotation=self.config.LOGURU_ROTATION, - compression=self.config.LOGURU_COMPRESSION, - ) - logger.debug(f"Logging to {self.config.LOGURU_SINK}") - - except Exception as err: - logger.debug( - f"Failed creating a new sink. Check your log config. 
error: {err}" - ) - - -class InterceptHandler(logging.Handler): - """ - Check https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging - """ - - def emit(self, record: logging.LogRecord) -> None: - try: - level = logger.level(record.levelname).name - except ValueError: - level = record.levelno - - frame, depth = logging.currentframe(), 2 - while frame.f_code.co_filename == logging.__file__: - frame = frame.f_back # type: ignore - depth += 1 - - logger.opt(depth=depth, exception=record.exc_info).log( - level, record.getMessage() - ) - - -@lru_cache -def get_log_handler() -> LogHandler: - return LogHandler() diff --git a/packages/grid/backend/grid/logging.yaml b/packages/grid/backend/grid/logging.yaml new file mode 100644 index 00000000000..b41eb783038 --- /dev/null +++ b/packages/grid/backend/grid/logging.yaml @@ -0,0 +1,46 @@ +version: 1 +disable_existing_loggers: True +formatters: + default: + format: "%(asctime)s - %(levelname)s - %(name)s - %(message)s" + datefmt: "%Y-%m-%d %H:%M:%S" + uvicorn.default: + "()": uvicorn.logging.DefaultFormatter + format: "%(asctime)s - %(levelname)s - %(name)s - %(message)s" + uvicorn.access: + "()": "uvicorn.logging.AccessFormatter" + format: "%(asctime)s - %(levelname)s - %(name)s - %(message)s" + datefmt: "%Y-%m-%d %H:%M:%S" +handlers: + default: + formatter: default + class: logging.StreamHandler + stream: ext://sys.stdout + uvicorn.default: + formatter: uvicorn.default + class: logging.StreamHandler + stream: ext://sys.stdout + uvicorn.access: + formatter: uvicorn.access + class: logging.StreamHandler + stream: ext://sys.stdout +loggers: + uvicorn.error: + level: INFO + handlers: + - uvicorn.default + propagate: no + uvicorn.access: + level: INFO + handlers: + - uvicorn.access + propagate: no + syft: + level: INFO + handlers: + - default + propagate: no +root: + level: INFO + handlers: + - default diff --git a/packages/grid/backend/grid/main.py b/packages/grid/backend/grid/main.py index 9ca43dadee8..459448c5f01 100644 --- a/packages/grid/backend/grid/main.py +++ b/packages/grid/backend/grid/main.py @@ -1,7 +1,6 @@ -# stdlib - # stdlib from contextlib import asynccontextmanager +import logging from typing import Any # third party @@ -16,7 +15,15 @@ from grid.api.router import api_router from grid.core.config import settings from grid.core.node import worker -from grid.logger.handler import get_log_handler + + +class EndpointFilter(logging.Filter): + def filter(self, record: logging.LogRecord) -> bool: + return record.getMessage().find("/api/v2/?probe=livenessProbe") == -1 + + +logger = logging.getLogger("uvicorn.error") +logging.getLogger("uvicorn.access").addFilter(EndpointFilter()) @asynccontextmanager @@ -25,7 +32,7 @@ async def lifespan(app: FastAPI) -> Any: yield finally: worker.stop() - print("Worker Stop !!!") + logger.info("Worker Stop") app = FastAPI( @@ -34,7 +41,6 @@ async def lifespan(app: FastAPI) -> Any: lifespan=lifespan, ) -app.add_event_handler("startup", get_log_handler().init_logger) # Set all CORS enabled origins if settings.BACKEND_CORS_ORIGINS: @@ -47,13 +53,13 @@ async def lifespan(app: FastAPI) -> Any: ) app.include_router(api_router, prefix=settings.API_V2_STR) -print("Included routes, app should now be reachable") +logger.info("Included routes, app should now be reachable") if settings.DEV_MODE: - print("Staging protocol changes...") + logger.info("Staging protocol changes...") status = stage_protocol_changes() - print(status) + logger.info(f"Staging protocol result: {status}") # needed 
for Google Kubernetes Engine LoadBalancer Healthcheck diff --git a/packages/grid/backend/grid/start.sh b/packages/grid/backend/grid/start.sh index bcb36c5e5a9..4b3d5de4cf2 100755 --- a/packages/grid/backend/grid/start.sh +++ b/packages/grid/backend/grid/start.sh @@ -33,4 +33,4 @@ export NODE_TYPE=$NODE_TYPE echo "NODE_UID=$NODE_UID" echo "NODE_TYPE=$NODE_TYPE" -exec $DEBUG_CMD uvicorn $RELOAD --host $HOST --port $PORT --log-level $LOG_LEVEL "$APP_MODULE" +exec $DEBUG_CMD uvicorn $RELOAD --host $HOST --port $PORT --log-config=$APPDIR/grid/logging.yaml --log-level $LOG_LEVEL "$APP_MODULE" diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 59bfee973ea..944e4986751 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -30,7 +30,6 @@ syft = bcrypt==4.1.2 boto3==1.34.56 forbiddenfruit==0.1.4 - loguru==0.7.2 packaging>=23.0 pyarrow==15.0.0 pycapnp==2.0.0 diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index d7183898935..7aa5789aa30 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -9,79 +9,78 @@ from typing import Any # relative -from .abstract_node import NodeSideType # noqa: F401 -from .abstract_node import NodeType # noqa: F401 -from .client.client import connect # noqa: F401 -from .client.client import login # noqa: F401 -from .client.client import login_as_guest # noqa: F401 -from .client.client import register # noqa: F401 -from .client.domain_client import DomainClient # noqa: F401 -from .client.gateway_client import GatewayClient # noqa: F401 -from .client.registry import DomainRegistry # noqa: F401 -from .client.registry import EnclaveRegistry # noqa: F401 -from .client.registry import NetworkRegistry # noqa: F401 -from .client.search import Search # noqa: F401 -from .client.search import SearchResults # noqa: F401 -from .client.user_settings import UserSettings # noqa: F401 -from .client.user_settings import settings # noqa: F401 -from .custom_worker.config import DockerWorkerConfig # noqa: F401 -from .custom_worker.config import PrebuiltWorkerConfig # noqa: F401 -from .node.credentials import SyftSigningKey # noqa: F401 -from .node.domain import Domain # noqa: F401 -from .node.enclave import Enclave # noqa: F401 -from .node.gateway import Gateway # noqa: F401 -from .node.server import serve_node # noqa: F401 -from .node.server import serve_node as bind_worker # noqa: F401 -from .node.worker import Worker # noqa: F401 -from .orchestra import Orchestra as orchestra # noqa: F401 -from .protocol.data_protocol import bump_protocol_version # noqa: F401 -from .protocol.data_protocol import check_or_stage_protocol # noqa: F401 -from .protocol.data_protocol import get_data_protocol # noqa: F401 -from .protocol.data_protocol import stage_protocol_changes # noqa: F401 -from .serde import NOTHING # noqa: F401 -from .serde.deserialize import _deserialize as deserialize # noqa: F401 -from .serde.serializable import serializable # noqa: F401 -from .serde.serialize import _serialize as serialize # noqa: F401 -from .service.action.action_data_empty import ActionDataEmpty # noqa: F401 -from .service.action.action_object import ActionObject # noqa: F401 -from .service.action.plan import Plan # noqa: F401 -from .service.action.plan import planify # noqa: F401 -from .service.api.api import api_endpoint # noqa: F401 -from .service.api.api import api_endpoint_method # noqa: F401 -from .service.api.api import create_new_api_endpoint as TwinAPIEndpoint # noqa: F401 -from .service.code.user_code 
import UserCodeStatus # noqa: F401; noqa: F401 -from .service.code.user_code import syft_function # noqa: F401; noqa: F401 -from .service.code.user_code import syft_function_single_use # noqa: F401; noqa: F401 -from .service.data_subject import DataSubjectCreate as DataSubject # noqa: F401 -from .service.dataset.dataset import Contributor # noqa: F401 -from .service.dataset.dataset import CreateAsset as Asset # noqa: F401 -from .service.dataset.dataset import CreateDataset as Dataset # noqa: F401 -from .service.notification.notifications import NotificationStatus # noqa: F401 -from .service.policy.policy import CustomInputPolicy # noqa: F401 -from .service.policy.policy import CustomOutputPolicy # noqa: F401 -from .service.policy.policy import ExactMatch # noqa: F401 -from .service.policy.policy import SingleExecutionExactOutput # noqa: F401 -from .service.policy.policy import UserInputPolicy # noqa: F401 -from .service.policy.policy import UserOutputPolicy # noqa: F401 -from .service.project.project import ProjectSubmit as Project # noqa: F401 -from .service.request.request import SubmitRequest as Request # noqa: F401 -from .service.response import SyftError # noqa: F401 -from .service.response import SyftNotReady # noqa: F401 -from .service.response import SyftSuccess # noqa: F401 -from .service.user.roles import Roles as roles # noqa: F401 -from .service.user.user_service import UserService # noqa: F401 +from .abstract_node import NodeSideType +from .abstract_node import NodeType +from .client.client import connect +from .client.client import login +from .client.client import login_as_guest +from .client.client import register +from .client.domain_client import DomainClient +from .client.gateway_client import GatewayClient +from .client.registry import DomainRegistry +from .client.registry import EnclaveRegistry +from .client.registry import NetworkRegistry +from .client.search import Search +from .client.search import SearchResults +from .client.user_settings import UserSettings +from .client.user_settings import settings +from .custom_worker.config import DockerWorkerConfig +from .custom_worker.config import PrebuiltWorkerConfig +from .node.credentials import SyftSigningKey +from .node.domain import Domain +from .node.enclave import Enclave +from .node.gateway import Gateway +from .node.server import serve_node +from .node.server import serve_node as bind_worker +from .node.worker import Worker +from .orchestra import Orchestra as orchestra +from .protocol.data_protocol import bump_protocol_version +from .protocol.data_protocol import check_or_stage_protocol +from .protocol.data_protocol import get_data_protocol +from .protocol.data_protocol import stage_protocol_changes +from .serde import NOTHING +from .serde.deserialize import _deserialize as deserialize +from .serde.serializable import serializable +from .serde.serialize import _serialize as serialize +from .service.action.action_data_empty import ActionDataEmpty +from .service.action.action_object import ActionObject +from .service.action.plan import Plan +from .service.action.plan import planify +from .service.api.api import api_endpoint +from .service.api.api import api_endpoint_method +from .service.api.api import create_new_api_endpoint as TwinAPIEndpoint +from .service.code.user_code import UserCodeStatus +from .service.code.user_code import syft_function +from .service.code.user_code import syft_function_single_use +from .service.data_subject import DataSubjectCreate as DataSubject +from .service.dataset.dataset import 
Contributor +from .service.dataset.dataset import CreateAsset as Asset +from .service.dataset.dataset import CreateDataset as Dataset +from .service.notification.notifications import NotificationStatus +from .service.policy.policy import CustomInputPolicy +from .service.policy.policy import CustomOutputPolicy +from .service.policy.policy import ExactMatch +from .service.policy.policy import SingleExecutionExactOutput +from .service.policy.policy import UserInputPolicy +from .service.policy.policy import UserOutputPolicy +from .service.project.project import ProjectSubmit as Project +from .service.request.request import SubmitRequest as Request +from .service.response import SyftError +from .service.response import SyftNotReady +from .service.response import SyftSuccess +from .service.user.roles import Roles as roles +from .service.user.user_service import UserService from .stable_version import LATEST_STABLE_SYFT from .types.syft_object import SyftObject -from .types.twin_object import TwinObject # noqa: F401 -from .types.uid import UID # noqa: F401 -from .util import filterwarnings # noqa: F401 -from .util import logger # noqa: F401 -from .util import options # noqa: F401 -from .util.autoreload import disable_autoreload # noqa: F401 -from .util.autoreload import enable_autoreload # noqa: F401 -from .util.telemetry import instrument # noqa: F401 -from .util.util import autocache # noqa: F401 -from .util.util import get_root_data_path # noqa: F401 +from .types.twin_object import TwinObject +from .types.uid import UID +from .util import filterwarnings +from .util import options +from .util.autoreload import disable_autoreload +from .util.autoreload import enable_autoreload +from .util.telemetry import instrument +from .util.util import autocache +from .util.util import get_root_data_path from .util.version_compare import make_requires requires = make_requires(LATEST_STABLE_SYFT, __version__) @@ -92,7 +91,6 @@ sys.path.append(str(Path(__file__))) -logger.start() try: # third party diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index c4a3a1b40a9..0e61053a768 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -226,6 +226,9 @@ def sign(self, credentials: SyftSigningKey) -> SignedSyftAPICall: signature=signed_message.signature, ) + def __repr__(self) -> str: + return f"SyftAPICall(path={self.path}, args={self.args}, kwargs={self.kwargs}, blocking={self.blocking})" + @instrument @serializable() @@ -1266,7 +1269,6 @@ def monkey_patch_getdef(self: Any, obj: Any, oname: str = "") -> str | None: Inspector._getdef_bak = Inspector._getdef Inspector._getdef = types.MethodType(monkey_patch_getdef, Inspector) except Exception: - # print("Failed to monkeypatch IPython Signature Override") pass # nosec diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 2cec54d0e52..79f8dd21198 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -7,6 +7,7 @@ from enum import Enum from getpass import getpass import json +import logging from typing import Any from typing import TYPE_CHECKING from typing import cast @@ -48,7 +49,6 @@ from ..types.grid_url import GridURL from ..types.syft_object import SYFT_OBJECT_VERSION_2 from ..types.uid import UID -from ..util.logger import debug from ..util.telemetry import instrument from ..util.util import prompt_warning_message from ..util.util import thread_ident @@ -62,6 +62,8 @@ from .connection import 
NodeConnection from .protocol import SyftProtocol +logger = logging.getLogger(__name__) + if TYPE_CHECKING: # relative from ..service.network.node_peer import NodePeer @@ -77,7 +79,7 @@ def upgrade_tls(url: GridURL, response: Response) -> GridURL: if response.url.startswith("https://") and url.protocol == "http": # we got redirected to https https_url = GridURL.from_url(response.url).with_path("") - debug(f"GridURL Upgraded to HTTPS. {https_url}") + logger.debug(f"GridURL Upgraded to HTTPS. {https_url}") return https_url except Exception as e: print(f"Failed to upgrade to HTTPS. {e}") diff --git a/packages/syft/src/syft/client/domain_client.py b/packages/syft/src/syft/client/domain_client.py index 5cdaa88906d..b54ebf0bcf0 100644 --- a/packages/syft/src/syft/client/domain_client.py +++ b/packages/syft/src/syft/client/domain_client.py @@ -2,14 +2,15 @@ from __future__ import annotations # stdlib +import logging from pathlib import Path import re from string import Template +import traceback from typing import TYPE_CHECKING from typing import cast # third party -from loguru import logger import markdown from result import Result from tqdm import tqdm @@ -41,6 +42,8 @@ from .connection import NodeConnection from .protocol import SyftProtocol +logger = logging.getLogger(__name__) + if TYPE_CHECKING: # relative from ..orchestra import NodeHandle @@ -271,8 +274,9 @@ def upload_files( return ActionObject.from_obj(result).send(self) except Exception as err: - logger.debug("upload_files: Error creating action_object: {}", err) - return SyftError(message=f"Failed to upload files: {err}") + return SyftError( + message=f"Failed to upload files: {err}.\n{traceback.format_exc()}" + ) def connect_to_gateway( self, diff --git a/packages/syft/src/syft/client/registry.py b/packages/syft/src/syft/client/registry.py index 4128af452d8..4f239e2265d 100644 --- a/packages/syft/src/syft/client/registry.py +++ b/packages/syft/src/syft/client/registry.py @@ -4,6 +4,7 @@ # stdlib from concurrent import futures import json +import logging import os from typing import Any @@ -18,10 +19,9 @@ from ..service.response import SyftException from ..types.grid_url import GridURL from ..util.constants import DEFAULT_TIMEOUT -from ..util.logger import error -from ..util.logger import warning from .client import SyftClient as Client +logger = logging.getLogger(__name__) NETWORK_REGISTRY_URL = ( "https://raw.githubusercontent.com/OpenMined/NetworkRegistry/main/gateways.json" ) @@ -43,7 +43,7 @@ def __init__(self) -> None: network_json=network_json, version="2.0.0" ) except Exception as e: - warning( + logger.warning( f"Failed to get Network Registry, go checkout: {NETWORK_REGISTRY_REPO}. Exception: {e}" ) @@ -64,7 +64,7 @@ def load_network_registry_json() -> dict: return network_json except Exception as e: - warning( + logger.warning( f"Failed to get Network Registry from {NETWORK_REGISTRY_REPO}. Exception: {e}" ) return {} @@ -169,7 +169,6 @@ def create_client(network: dict[str, Any]) -> Client: client = connect(url=str(grid_url)) return client.guest() except Exception as e: - error(f"Failed to login with: {network}. {e}") raise SyftException(f"Failed to login with: {network}. {e}") def __getitem__(self, key: str | int) -> Client: @@ -194,7 +193,7 @@ def __init__(self) -> None: ) self._get_all_domains() except Exception as e: - warning( + logger.warning( f"Failed to get Network Registry, go checkout: {NETWORK_REGISTRY_REPO}. 
{e}" ) @@ -263,7 +262,7 @@ def online_domains(self) -> list[tuple[NodePeer, NodeMetadataJSON | None]]: try: network_client = NetworkRegistry.create_client(network) except Exception as e: - print(f"Error in creating network client with exception {e}") + logger.error(f"Error in creating network client {e}") continue domains: list[NodePeer] = network_client.domains.retrieve_nodes() @@ -334,7 +333,6 @@ def create_client(self, peer: NodePeer) -> Client: try: return peer.guest_client except Exception as e: - error(f"Failed to login to: {peer}. {e}") raise SyftException(f"Failed to login to: {peer}. {e}") def __getitem__(self, key: str | int) -> Client: @@ -364,7 +362,7 @@ def __init__(self) -> None: enclaves_json = response.json() self.all_enclaves = enclaves_json["2.0.0"]["enclaves"] except Exception as e: - warning( + logger.warning( f"Failed to get Enclave Registry, go checkout: {ENCLAVE_REGISTRY_REPO}. {e}" ) @@ -433,7 +431,6 @@ def create_client(enclave: dict[str, Any]) -> Client: client = connect(url=str(grid_url)) return client.guest() except Exception as e: - error(f"Failed to login with: {enclave}. {e}") raise SyftException(f"Failed to login with: {enclave}. {e}") def __getitem__(self, key: str | int) -> Client: diff --git a/packages/syft/src/syft/client/syncing.py b/packages/syft/src/syft/client/syncing.py index c0b4dd8196e..2a17dbc2d55 100644 --- a/packages/syft/src/syft/client/syncing.py +++ b/packages/syft/src/syft/client/syncing.py @@ -2,6 +2,7 @@ # stdlib from collections.abc import Collection +import logging # relative from ..abstract_node import NodeSideType @@ -21,6 +22,8 @@ from .sync_decision import SyncDecision from .sync_decision import SyncDirection +logger = logging.getLogger(__name__) + def compare_states( from_state: SyncState, @@ -174,7 +177,7 @@ def handle_sync_batch( ) sync_instructions.append(instruction) - print(f"Decision: Syncing {len(sync_instructions)} objects") + logger.debug(f"Decision: Syncing {len(sync_instructions)} objects") # Apply empty state to source side to signal that we are done syncing res_src = src_client.apply_state(src_resolved_state) @@ -206,7 +209,7 @@ def handle_ignore_batch( for other_batch in other_ignore_batches: other_batch.decision = SyncDecision.IGNORE - print(f"Ignoring other batch with root {other_batch.root_type.__name__}") + logger.debug(f"Ignoring other batch with root {other_batch.root_type.__name__}") src_client = obj_diff_batch.source_client tgt_client = obj_diff_batch.target_client @@ -240,7 +243,7 @@ def handle_unignore_batch( other_batches = [b for b in all_batches if b is not obj_diff_batch] other_unignore_batches = get_other_unignore_batches(obj_diff_batch, other_batches) for other_batch in other_unignore_batches: - print(f"Ignoring other batch with root {other_batch.root_type.__name__}") + logger.debug(f"Ignoring other batch with root {other_batch.root_type.__name__}") other_batch.decision = None src_resolved_state.add_unignored(other_batch.root_id) tgt_resolved_state.add_unignored(other_batch.root_id) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 31ead514b2b..58a3798c5fd 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -8,6 +8,7 @@ from functools import partial import hashlib import json +import logging import os from pathlib import Path import shutil @@ -19,7 +20,6 @@ from typing import Any # third party -from loguru import logger from nacl.signing import SigningKey from result import Err from result import Result @@ -140,6 
+140,8 @@
 from .credentials import SyftVerifyKey
 from .worker_settings import WorkerSettings
+logger = logging.getLogger(__name__)
+
 # if user code needs to be serded and its not available we can call this to refresh
 # the code for a specific node UID and thread
 CODE_RELOADER: dict[int, Callable] = {}
@@ -464,7 +466,7 @@ def get_default_store(self, use_sqlite: bool, store_type: str) -> StoreConfig:
 path = self.get_temp_dir("db")
 file_name: str = f"{self.id}.sqlite"
 if self.dev_mode:
- print(f"{store_type}'s SQLite DB path: {path/file_name}")
+ logger.debug(f"{store_type}'s SQLite DB path: {path/file_name}")
 return SQLiteStoreConfig(
 client_config=SQLiteStoreClientConfig(
 filename=file_name,
@@ -535,7 +537,7 @@ def create_queue_config(
 queue_config_ = queue_config
 elif queue_port is not None or n_consumers > 0 or create_producer:
 if not create_producer and queue_port is None:
- print("No queue port defined to bind consumers.")
+ logger.warning("No queue port defined to bind consumers.")
 queue_config_ = ZMQQueueConfig(
 client_config=ZMQClientConfig(
 create_producer=create_producer,
@@ -590,7 +592,7 @@ def init_queue_manager(self, queue_config: QueueConfig) -> None:
 else:
 # Create consumer for given worker pool
 syft_worker_uid = get_syft_worker_uid()
- print(
+ logger.info(
 f"Running as consumer with uid={syft_worker_uid} service={service_name}"
 )
@@ -750,9 +752,8 @@ def find_and_migrate_data(self) -> None:
 )
 if object_pending_migration:
- print(
- "Object in Document Store that needs migration: ",
- object_pending_migration,
+ logger.debug(
+ f"Object in Document Store that needs migration: {object_pending_migration}"
 )
 # Migrate data for objects in document store
@@ -762,7 +763,7 @@ def find_and_migrate_data(self) -> None:
 if object_partition is None:
 continue
- print(f"Migrating data for: {canonical_name} table.")
+ logger.debug(f"Migrating data for: {canonical_name} table.")
 migration_status = object_partition.migrate_data(
 to_klass=object_type, context=context
 )
@@ -779,9 +780,8 @@ def find_and_migrate_data(self) -> None:
 )
 if action_object_pending_migration:
- print(
- "Object in Action Store that needs migration: ",
- action_object_pending_migration,
+ logger.info(
+ f"Object in Action Store that needs migration: {action_object_pending_migration}",
 )
 # Migrate data for objects in action store
@@ -795,7 +795,7 @@ def find_and_migrate_data(self) -> None:
 raise Exception(
 f"Failed to migrate data for {canonical_name}. 
Error: {migration_status.err()}" ) - print("Data Migrated to latest version !!!") + logger.info("Data Migrated to latest version !!!") @property def guest_client(self) -> SyftClient: @@ -817,7 +817,7 @@ def get_guest_client(self, verbose: bool = True) -> SyftClient: ) if self.node_type: message += f"side {self.node_type.value.capitalize()} > as GUEST" - print(message) + logger.debug(message) client_type = connection.get_client_type() if isinstance(client_type, SyftError): @@ -1265,6 +1265,7 @@ def handle_api_call_with_unsigned_result( _private_api_path = user_config_registry.private_path_for(api_call.path) method = self.get_service_method(_private_api_path) try: + logger.info(f"API Call: {api_call}") result = method(context, *api_call.args, **api_call.kwargs) except PySyftException as e: return e.handle() @@ -1604,7 +1605,9 @@ def create_initial_settings(self, admin_email: str) -> NodeSettings | None: try: settings_stash = SettingsStash(store=self.document_store) if self.signing_key is None: - print("create_initial_settings failed as there is no signing key") + logger.debug( + "create_initial_settings failed as there is no signing key" + ) return None settings_exists = settings_stash.get_all(self.signing_key.verify_key).ok() if settings_exists: @@ -1639,7 +1642,7 @@ def create_initial_settings(self, admin_email: str) -> NodeSettings | None: return result.ok() return None except Exception as e: - print(f"create_initial_settings failed with error {e}") + logger.error("create_initial_settings failed", exc_info=e) return None @@ -1679,7 +1682,7 @@ def create_admin_new( else: raise Exception(f"Could not create user: {result}") except Exception as e: - print("Unable to create new admin", e) + logger.error("Unable to create new admin", exc_info=e) return None @@ -1739,11 +1742,12 @@ def create_default_worker_pool(node: Node) -> SyftError | None: if isinstance(default_worker_pool, SyftError): logger.error( - f"Failed to get default worker pool {default_pool_name}. Error: {default_worker_pool.message}" + f"Failed to get default worker pool {default_pool_name}. 
" + f"Error: {default_worker_pool.message}" ) return default_worker_pool - print(f"Creating default worker image with tag='{default_worker_tag}'") + logger.info(f"Creating default worker image with tag='{default_worker_tag}'") # Get/Create a default worker SyftWorkerImage default_image = create_default_image( credentials=credentials, @@ -1752,11 +1756,11 @@ def create_default_worker_pool(node: Node) -> SyftError | None: in_kubernetes=in_kubernetes(), ) if isinstance(default_image, SyftError): - print("Failed to create default worker image: ", default_image.message) + logger.error(f"Failed to create default worker image: {default_image.message}") return default_image if not default_image.is_built: - print(f"Building default worker image with tag={default_worker_tag}") + logger.info(f"Building default worker image with tag={default_worker_tag}") image_build_method = node.get_service_method(SyftWorkerImageService.build) # Build the Image for given tag result = image_build_method( @@ -1767,11 +1771,11 @@ def create_default_worker_pool(node: Node) -> SyftError | None: ) if isinstance(result, SyftError): - print("Failed to build default worker image: ", result.message) + logger.error(f"Failed to build default worker image: {result.message}") return None # Create worker pool if it doesn't exists - print( + logger.info( "Setting up worker pool" f"name={default_pool_name} " f"workers={worker_count} " @@ -1802,17 +1806,17 @@ def create_default_worker_pool(node: Node) -> SyftError | None: ) if isinstance(result, SyftError): - print(f"Default worker pool error. {result.message}") + logger.info(f"Default worker pool error. {result.message}") return None for n in range(worker_to_add_): container_status = result[n] if container_status.error: - print( + logger.error( f"Failed to create container: Worker: {container_status.worker}," f"Error: {container_status.error}" ) return None - print("Created default worker pool.") + logger.info("Created default worker pool.") return None diff --git a/packages/syft/src/syft/node/routes.py b/packages/syft/src/syft/node/routes.py index 5b25774ff18..8be45245190 100644 --- a/packages/syft/src/syft/node/routes.py +++ b/packages/syft/src/syft/node/routes.py @@ -1,6 +1,7 @@ # stdlib import base64 import binascii +import logging from typing import Annotated # third party @@ -12,7 +13,6 @@ from fastapi import Response from fastapi.responses import JSONResponse from fastapi.responses import StreamingResponse -from loguru import logger from pydantic import ValidationError import requests @@ -34,6 +34,8 @@ from .credentials import UserLoginCredentials from .worker import Worker +logger = logging.getLogger(__name__) + def make_routes(worker: Worker) -> APIRouter: if TRACE_MODE: @@ -42,8 +44,8 @@ def make_routes(worker: Worker) -> APIRouter: # third party from opentelemetry import trace from opentelemetry.propagate import extract - except Exception: - print("Failed to import opentelemetry") + except Exception as e: + logger.error("Failed to import opentelemetry", exc_info=e) router = APIRouter() @@ -171,7 +173,7 @@ def handle_login(email: str, password: str, node: AbstractNode) -> Response: result = method(context=context) if isinstance(result, SyftError): - logger.bind(payload={"email": email}).error(result.message) + logger.error(f"Login Error: {result.message}. 
user={email}") response = result else: user_private_key = result @@ -196,7 +198,9 @@ def handle_register(data: bytes, node: AbstractNode) -> Response: result = method(new_user=user_create) if isinstance(result, SyftError): - logger.bind(payload={"user": user_create}).error(result.message) + logger.error( + f"Register Error: {result.message}. user={user_create.model_dump()}" + ) response = SyftError(message=f"{result.message}") else: response = result diff --git a/packages/syft/src/syft/node/server.py b/packages/syft/src/syft/node/server.py index 3da97e4b0a2..43b8359a1f9 100644 --- a/packages/syft/src/syft/node/server.py +++ b/packages/syft/src/syft/node/server.py @@ -2,7 +2,6 @@ import asyncio from collections.abc import Callable from enum import Enum -import logging import multiprocessing import os import platform @@ -144,14 +143,7 @@ async def _run_uvicorn( except Exception: # nosec print(f"Failed to kill python process on port: {port}") - log_level = "critical" - if dev_mode: - log_level = "info" - logging.getLogger("uvicorn").setLevel(logging.CRITICAL) - logging.getLogger("uvicorn.access").setLevel(logging.CRITICAL) - config = uvicorn.Config( - app, host=host, port=port, log_level=log_level, reload=dev_mode - ) + config = uvicorn.Config(app, host=host, port=port, reload=dev_mode) server = uvicorn.Server(config) await server.serve() diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index dc9eb40e81e..6e6043f4e3e 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -7,6 +7,7 @@ from enum import Enum import inspect from io import BytesIO +import logging from pathlib import Path import sys import threading @@ -46,7 +47,6 @@ from ...types.syncable_object import SyncableSyftObject from ...types.uid import LineageID from ...types.uid import UID -from ...util.logger import debug from ...util.util import prompt_warning_message from ..context import AuthedServiceContext from ..response import SyftException @@ -59,6 +59,8 @@ from .action_types import action_type_for_type from .action_types import action_types +logger = logging.getLogger(__name__) + if TYPE_CHECKING: # relative from ..sync.diff_state import AttrDiff @@ -443,9 +445,10 @@ def make_action_side_effect( action_type=context.action_type, ) context.action = action - except Exception: - print(f"make_action_side_effect failed with {traceback.format_exc()}") - return Err(f"make_action_side_effect failed with {traceback.format_exc()}") + except Exception as e: + msg = "make_action_side_effect failed" + logger.error(msg, exc_info=e) + return Err(f"{msg} with {traceback.format_exc()}") return Ok((context, args, kwargs)) @@ -521,7 +524,7 @@ def convert_to_pointers( arg.syft_node_uid = node_uid r = arg._save_to_blob_storage() if isinstance(r, SyftError): - print(r.message) + logger.error(r.message) arg = api.services.action.set(arg) arg_list.append(arg) @@ -539,7 +542,7 @@ def convert_to_pointers( arg.syft_node_uid = node_uid r = arg._save_to_blob_storage() if isinstance(r, SyftError): - print(r.message) + logger.error(r.message) arg = api.services.action.set(arg) kwarg_dict[k] = arg @@ -772,9 +775,8 @@ def reload_cache(self) -> SyftError | None: uid=self.syft_blob_storage_entry_id ) if isinstance(blob_retrieval_object, SyftError): - print( - "Could not fetch actionobject data\n", - blob_retrieval_object, + logger.error( + f"Could not fetch actionobject data: {blob_retrieval_object}" ) return 
blob_retrieval_object # relative @@ -839,13 +841,15 @@ def _save_to_blob_storage_(self, data: Any) -> SyftError | None: blob_deposit_object.blob_storage_entry_id ) else: - print("cannot save to blob storage") + logger.warn("cannot save to blob storage. allocate_method=None") self.syft_action_data_type = type(data) self._set_reprs(data) self.syft_has_bool_attr = hasattr(data, "__bool__") else: - debug("skipping writing action object to store, passed data was empty.") + logger.debug( + "skipping writing action object to store, passed data was empty." + ) self.syft_action_data_cache = data @@ -1575,7 +1579,7 @@ def _syft_run_pre_hooks__( if result.is_ok(): context, result_args, result_kwargs = result.ok() else: - debug(f"Pre-hook failed with {result.err()}") + logger.debug(f"Pre-hook failed with {result.err()}") if name not in self._syft_dont_wrap_attrs(): if HOOK_ALWAYS in self.syft_pre_hooks__: for hook in self.syft_pre_hooks__[HOOK_ALWAYS]: @@ -1584,7 +1588,7 @@ def _syft_run_pre_hooks__( context, result_args, result_kwargs = result.ok() else: msg = result.err().replace("\\n", "\n") - debug(f"Pre-hook failed with {msg}") + logger.debug(f"Pre-hook failed with {msg}") if self.is_pointer: if name not in self._syft_dont_wrap_attrs(): @@ -1595,7 +1599,7 @@ def _syft_run_pre_hooks__( context, result_args, result_kwargs = result.ok() else: msg = result.err().replace("\\n", "\n") - debug(f"Pre-hook failed with {msg}") + logger.debug(f"Pre-hook failed with {msg}") return context, result_args, result_kwargs @@ -1610,7 +1614,7 @@ def _syft_run_post_hooks__( if result.is_ok(): new_result = result.ok() else: - debug(f"Post hook failed with {result.err()}") + logger.debug(f"Post hook failed with {result.err()}") if name not in self._syft_dont_wrap_attrs(): if HOOK_ALWAYS in self.syft_post_hooks__: @@ -1619,7 +1623,7 @@ def _syft_run_post_hooks__( if result.is_ok(): new_result = result.ok() else: - debug(f"Post hook failed with {result.err()}") + logger.debug(f"Post hook failed with {result.err()}") if self.is_pointer: if name not in self._syft_dont_wrap_attrs(): @@ -1629,7 +1633,7 @@ def _syft_run_post_hooks__( if result.is_ok(): new_result = result.ok() else: - debug(f"Post hook failed with {result.err()}") + logger.debug(f"Post hook failed with {result.err()}") return new_result @@ -1721,7 +1725,7 @@ def _syft_wrap_attribute_for_bool_on_nonbools(self, name: str) -> Any: "[_wrap_attribute_for_bool_on_nonbools] self.syft_action_data already implements the bool operator" ) - debug("[__getattribute__] Handling bool on nonbools") + logger.debug("[__getattribute__] Handling bool on nonbools") context = PreHookContext( obj=self, op_name=name, @@ -1754,7 +1758,7 @@ def _syft_wrap_attribute_for_properties(self, name: str) -> Any: raise RuntimeError( "[_wrap_attribute_for_properties] Use this only on properties" ) - debug(f"[__getattribute__] Handling property {name} ") + logger.debug(f"[__getattribute__] Handling property {name}") context = PreHookContext( obj=self, @@ -1778,7 +1782,7 @@ def _syft_wrap_attribute_for_methods(self, name: str) -> Any: def fake_func(*args: Any, **kwargs: Any) -> Any: return ActionDataEmpty(syft_internal_type=self.syft_internal_type) - debug(f"[__getattribute__] Handling method {name} ") + logger.debug(f"[__getattribute__] Handling method {name}") if ( issubclass(self.syft_action_data_type, ActionDataEmpty) and name not in action_data_empty_must_run @@ -1815,20 +1819,20 @@ def _base_wrapper(*args: Any, **kwargs: Any) -> Any: return post_result if inspect.ismethod(original_func) or 
inspect.ismethoddescriptor(original_func):
-            debug("Running method: ", name)
+            logger.debug(f"Running method: {name}")
 
             def wrapper(_self: Any, *args: Any, **kwargs: Any) -> Any:
                 return _base_wrapper(*args, **kwargs)
 
             wrapper = types.MethodType(wrapper, type(self))
         else:
-            debug("Running non-method: ", name)
+            logger.debug(f"Running non-method: {name}")
 
             wrapper = _base_wrapper
 
         try:
             wrapper.__doc__ = original_func.__doc__
-            debug(
-                "Found original signature for ",
-                name,
-                inspect.signature(original_func),
-            )
+            logger.debug(
+                f"Found original signature for {name} "
+                f"{inspect.signature(original_func)}"
+            )
             wrapper.__ipython_inspector_signature_override__ = inspect.signature(
                 original_func
             )
         except Exception:
-            debug("name", name, "has no signature")
+            logger.debug(f"name={name} has no signature")
 
         # third party
         return wrapper
 
@@ -1931,7 +1935,7 @@ def is_link(self) -> bool:
 
     def __setattr__(self, name: str, value: Any) -> Any:
         defined_on_self = name in self.__dict__ or name in self.__private_attributes__
-        debug(">> ", name, ", defined_on_self = ", defined_on_self)
+        logger.debug(f">> {name} defined_on_self={defined_on_self}")
 
         # use the custom defined version
         if defined_on_self:
@@ -2180,13 +2184,13 @@ def __int__(self) -> float:
 
 
 def debug_original_func(name: str, func: Callable) -> None:
-    debug(f"{name} func is:")
-    debug("inspect.isdatadescriptor", inspect.isdatadescriptor(func))
-    debug("inspect.isgetsetdescriptor", inspect.isgetsetdescriptor(func))
-    debug("inspect.isfunction", inspect.isfunction(func))
-    debug("inspect.isbuiltin", inspect.isbuiltin(func))
-    debug("inspect.ismethod", inspect.ismethod(func))
-    debug("inspect.ismethoddescriptor", inspect.ismethoddescriptor(func))
+    logger.debug(f"{name} func is:")
+    logger.debug(f"inspect.isdatadescriptor = {inspect.isdatadescriptor(func)}")
+    logger.debug(f"inspect.isgetsetdescriptor = {inspect.isgetsetdescriptor(func)}")
+    logger.debug(f"inspect.isfunction = {inspect.isfunction(func)}")
+    logger.debug(f"inspect.isbuiltin = {inspect.isbuiltin(func)}")
+    logger.debug(f"inspect.ismethod = {inspect.ismethod(func)}")
+    logger.debug(f"inspect.ismethoddescriptor = {inspect.ismethoddescriptor(func)}")
 
 
 def is_action_data_empty(obj: Any) -> bool:
diff --git a/packages/syft/src/syft/service/action/action_types.py b/packages/syft/src/syft/service/action/action_types.py
index 9721a48ec8e..c7bd730d557 100644
--- a/packages/syft/src/syft/service/action/action_types.py
+++ b/packages/syft/src/syft/service/action/action_types.py
@@ -1,10 +1,12 @@
 # stdlib
+import logging
 from typing import Any
 
 # relative
-from ...util.logger import debug
 from .action_data_empty import ActionDataEmpty
 
+logger = logging.getLogger(__name__)
+
 action_types: dict = {}
 
 
@@ -21,7 +23,9 @@ def action_type_for_type(obj_or_type: Any) -> type:
         obj_or_type = type(obj_or_type)
 
     if obj_or_type not in action_types:
-        debug(f"WARNING: No Type for {obj_or_type}, returning {action_types[Any]}")
+        logger.debug(
+            f"WARNING: No Type for {obj_or_type}, returning {action_types[Any]}"
+        )
 
     return action_types.get(obj_or_type, action_types[Any])
 
@@ -36,7 +40,7 @@ def action_type_for_object(obj: Any) -> type:
     _type = type(obj)
 
     if _type not in action_types:
-        debug(f"WARNING: No Type for {_type}, returning {action_types[Any]}")
+        logger.debug(f"WARNING: No Type for {_type}, returning {action_types[Any]}")
 
         return action_types[Any]
 
     return action_types[_type]
diff --git a/packages/syft/src/syft/service/network/network_service.py b/packages/syft/src/syft/service/network/network_service.py
index ac329420168..c32374ade31 100644
--- 
a/packages/syft/src/syft/service/network/network_service.py +++ b/packages/syft/src/syft/service/network/network_service.py @@ -1,11 +1,11 @@ # stdlib from collections.abc import Callable from enum import Enum +import logging import secrets from typing import Any # third party -from loguru import logger from result import Result # relative @@ -56,6 +56,8 @@ from .routes import NodeRouteType from .routes import PythonNodeRoute +logger = logging.getLogger(__name__) + VerifyKeyPartitionKey = PartitionKey(key="verify_key", type_=SyftVerifyKey) NodeTypePartitionKey = PartitionKey(key="node_type", type_=NodeType) OrderByNamePartitionKey = PartitionKey(key="name", type_=str) diff --git a/packages/syft/src/syft/service/network/node_peer.py b/packages/syft/src/syft/service/network/node_peer.py index c2db506ba23..5835cf7aa9e 100644 --- a/packages/syft/src/syft/service/network/node_peer.py +++ b/packages/syft/src/syft/service/network/node_peer.py @@ -1,6 +1,7 @@ # stdlib from collections.abc import Callable from enum import Enum +import logging # third party from result import Err @@ -35,6 +36,8 @@ from .routes import connection_to_route from .routes import route_to_connection +logger = logging.getLogger(__name__) + @serializable() class NodePeerConnectionStatus(Enum): @@ -245,7 +248,6 @@ def client_with_context( self, context: NodeServiceContext ) -> Result[type[SyftClient], str]: # third party - from loguru import logger if len(self.node_routes) < 1: raise ValueError(f"No routes to peer: {self}") @@ -255,12 +257,11 @@ def client_with_context( try: client_type = connection.get_client_type() except Exception as e: - logger.error( - f"Failed to establish a connection with {self.node_type} '{self.name}'. Exception: {e}" - ) - return Err( + msg = ( f"Failed to establish a connection with {self.node_type} '{self.name}'" ) + logger.error(msg, exc_info=e) + return Err(msg) if isinstance(client_type, SyftError): return Err(client_type.message) return Ok( diff --git a/packages/syft/src/syft/service/network/utils.py b/packages/syft/src/syft/service/network/utils.py index b03bc589d15..476411bc6e6 100644 --- a/packages/syft/src/syft/service/network/utils.py +++ b/packages/syft/src/syft/service/network/utils.py @@ -1,11 +1,9 @@ # stdlib +import logging import threading import time from typing import cast -# third party -from loguru import logger - # relative from ...serde.serializable import serializable from ...types.datetime import DateTime @@ -17,6 +15,8 @@ from .node_peer import NodePeerConnectionStatus from .node_peer import NodePeerUpdate +logger = logging.getLogger(__name__) + @serializable(without=["thread"]) class PeerHealthCheckTask: @@ -63,9 +63,7 @@ def peer_route_heathcheck(self, context: AuthedServiceContext) -> SyftError | No peer_update.ping_status = NodePeerConnectionStatus.TIMEOUT peer_client = None except Exception as e: - logger.error( - f"Failed to create client for peer: {peer} with exception {e}" - ) + logger.error(f"Failed to create client for peer: {peer}", exc_info=e) peer_update.ping_status = NodePeerConnectionStatus.TIMEOUT peer_client = None @@ -97,7 +95,7 @@ def peer_route_heathcheck(self, context: AuthedServiceContext) -> SyftError | No ) if result.is_err(): - logger.info(f"Failed to update peer in stash: {result.err()}") + logger.error(f"Failed to update peer in stash: {result.err()}") return None diff --git a/packages/syft/src/syft/service/notifier/notifier_service.py b/packages/syft/src/syft/service/notifier/notifier_service.py index aedb59b2e24..4c10708f0f0 100644 --- 
a/packages/syft/src/syft/service/notifier/notifier_service.py
+++ b/packages/syft/src/syft/service/notifier/notifier_service.py
@@ -2,6 +2,9 @@
 
 # stdlib
 
+# stdlib
+import logging
+
 # third party
 from pydantic import EmailStr
 from result import Err
@@ -22,6 +25,8 @@
 from .notifier_enums import NOTIFIERS
 from .notifier_stash import NotifierStash
 
+logger = logging.getLogger(__name__)
+
 
 @serializable()
 class NotifierService(AbstractService):
@@ -109,7 +114,7 @@ def turn_on(
                 message="You must provide both server and port to enable notifications."
             )
 
-        print("[LOG] Got notifier from db")
+        logger.debug("Got notifier from db")
         # If no new credentials provided, check for existing ones
         if not (email_username and email_password):
             if not (notifier.email_username and notifier.email_password):
@@ -119,10 +124,9 @@ def turn_on(
                     + ".settings.enable_notifications(email=<>, password=<>)"
                 )
             else:
-                print("[LOG] No new credentials provided. Using existing ones.")
+                logger.debug("No new credentials provided. Using existing ones.")
                 email_password = notifier.email_password
                 email_username = notifier.email_username
-        print("[LOG] Validating credentials...")
 
         validation_result = notifier.validate_email_credentials(
             username=email_username,
@@ -132,6 +136,7 @@
         )
 
         if validation_result.is_err():
+            logger.error(f"Invalid SMTP credentials {validation_result.err()}")
             return SyftError(
                 message="Invalid SMTP credentials. Please check your username and password."
             )
@@ -160,8 +165,8 @@
         notifier.email_sender = email_sender
         notifier.active = True
 
-        print(
-            "[LOG] Email credentials are valid. Updating the notifier settings in the db."
+        logger.debug(
+            "Email credentials are valid. Updating the notifier settings in the db."
         )
 
         result = self.stash.update(credentials=context.credentials, settings=notifier)
@@ -260,9 +265,8 @@ def init_notifier(
                 sender_not_set = not email_sender and not notifier.email_sender
 
                 if validation_result.is_err() or sender_not_set:
-                    print(
-                        "Ops something went wrong while trying to setup your notification system.",
-                        "Please check your credentials and configuration.",
+                    logger.error(
+                        f"Notifier validation error - {validation_result.err()}.",
                     )
                     notifier.active = False
                 else:
diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py
index 968e4b7c975..8793b49ba0a 100644
--- a/packages/syft/src/syft/service/queue/queue.py
+++ b/packages/syft/src/syft/service/queue/queue.py
@@ -1,13 +1,12 @@
 # stdlib
+import logging
 from multiprocessing import Process
 import threading
 from threading import Thread
 import time
 from typing import Any
-from typing import cast
 
 # third party
-from loguru import logger
 import psutil
 from result import Err
 from result import Ok
@@ -34,6 +33,8 @@
 from .queue_stash import QueueItem
 from .queue_stash import Status
 
+logger = logging.getLogger(__name__)
+
 
 class MonitorThread(threading.Thread):
     def __init__(
@@ -297,7 +298,7 @@ def handle_message(message: bytes, syft_worker_id: UID) -> None:
             queue_item.node_uid = worker.id
 
             job_item.status = JobStatus.PROCESSING
-            job_item.node_uid = cast(UID, worker.id)
+            job_item.node_uid = worker.id  # type: ignore[assignment]
             job_item.updated_at = DateTime.now()
 
             if syft_worker_id is not None:
diff --git a/packages/syft/src/syft/service/queue/zmq_queue.py b/packages/syft/src/syft/service/queue/zmq_queue.py
index 4559832f199..08ff386696e 100644
--- a/packages/syft/src/syft/service/queue/zmq_queue.py
+++ b/packages/syft/src/syft/service/queue/zmq_queue.py
@@ -2,6 +2,7 @@
 from 
binascii import hexlify from collections import defaultdict import itertools +import logging import socketserver import sys import threading @@ -10,7 +11,6 @@ from typing import Any # third party -from loguru import logger from pydantic import field_validator import zmq from zmq import Frame @@ -61,6 +61,8 @@ # Lock for working on ZMQ socket ZMQ_SOCKET_LOCK = threading.Lock() +logger = logging.getLogger(__name__) + class QueueMsgProtocol: W_WORKER = b"MDPW01" @@ -128,6 +130,13 @@ def get_expiry(self) -> float: def reset_expiry(self) -> None: self.expiry_t.reset() + def __str__(self) -> str: + svc = self.service.name if self.service else None + return ( + f"Worker(addr={self.address!r}, id={self.identity!r}, service={svc}, " + f"syft_worker_id={self.syft_worker_id!r})" + ) + @serializable() class ZMQProducer(QueueProducer): @@ -177,7 +186,7 @@ def close(self) -> None: try: self.poll_workers.unregister(self.socket) except Exception as e: - logger.exception("Failed to unregister poller. {}", e) + logger.exception("Failed to unregister poller.", exc_info=e) finally: if self.thread: self.thread.join(THREAD_TIMEOUT_SEC) @@ -232,7 +241,7 @@ def contains_unresolved_action_objects(self, arg: Any, recursion: int = 0) -> bo return True return value except Exception as e: - logger.exception("Failed to resolve action objects. {}", e) + logger.exception("Failed to resolve action objects.", exc_info=e) return True def unwrap_nested_actionobjects(self, data: Any) -> Any: @@ -367,9 +376,7 @@ def read_items(self) -> None: res = self.queue_stash.update(item.syft_client_verify_key, item) if res.is_err(): logger.error( - "Failed to update queue item={} error={}", - item, - res.err(), + f"Failed to update queue item={item} error={res.err()}" ) elif item.status == Status.PROCESSING: # Evaluate Retry condition here @@ -384,9 +391,7 @@ def read_items(self) -> None: res = self.queue_stash.update(item.syft_client_verify_key, item) if res.is_err(): logger.error( - "Failed to update queue item={} error={}", - item, - res.err(), + f"Failed to update queue item={item} error={res.err()}" ) def run(self) -> None: @@ -398,18 +403,18 @@ def run(self) -> None: def send(self, worker: bytes, message: bytes | list[bytes]) -> None: worker_obj = self.require_worker(worker) - self.send_to_worker(worker=worker_obj, msg=message) + self.send_to_worker(worker_obj, QueueMsgProtocol.W_REQUEST, message) def bind(self, endpoint: str) -> None: """Bind producer to endpoint.""" self.socket.bind(endpoint) - logger.info("Producer endpoint: {}", endpoint) + logger.info(f"ZMQProducer endpoint: {endpoint}") def send_heartbeats(self) -> None: """Send heartbeats to idle workers if it's time""" if self.heartbeat_t.has_expired(): for worker in self.waiting: - self.send_to_worker(worker, QueueMsgProtocol.W_HEARTBEAT, None, None) + self.send_to_worker(worker, QueueMsgProtocol.W_HEARTBEAT) self.heartbeat_t.reset() def purge_workers(self) -> None: @@ -420,22 +425,15 @@ def purge_workers(self) -> None: # work on a copy of the iterator for worker in list(self.waiting): if worker.has_expired(): - logger.info( - "Deleting expired Worker id={} uid={} expiry={} now={}", - worker.identity, - worker.syft_worker_id, - worker.get_expiry(), - Timeout.now(), - ) + logger.info(f"Deleting expired worker id={worker}") self.delete_worker(worker, False) def update_consumer_state_for_worker( self, syft_worker_id: UID, consumer_state: ConsumerState ) -> None: if self.worker_stash is None: - # TODO: fix the mypy issue logger.error( # type: ignore[unreachable] - f"Worker 
stash is not defined for ZMQProducer : {self.queue_name} - {self.id}" + f"ZMQProducer worker stash not defined for {self.queue_name} - {self.id}" ) return @@ -455,14 +453,13 @@ def update_consumer_state_for_worker( ) if res.is_err(): logger.error( - "Failed to update consumer state for worker id={} to state: {} error={}", - syft_worker_id, - consumer_state, - res.err(), + f"Failed to update consumer state for worker id={syft_worker_id} " + f"to state: {consumer_state} error={res.err()}", ) except Exception as e: logger.error( - f"Failed to update consumer state for worker id: {syft_worker_id} to state {consumer_state}. Error: {e}" + f"Failed to update consumer state for worker id: {syft_worker_id} to state {consumer_state}", + exc_info=e, ) def worker_waiting(self, worker: Worker) -> None: @@ -487,13 +484,12 @@ def dispatch(self, service: Service, msg: bytes) -> None: msg = service.requests.pop(0) worker = service.waiting.pop(0) self.waiting.remove(worker) - self.send_to_worker(worker, QueueMsgProtocol.W_REQUEST, None, msg) + self.send_to_worker(worker, QueueMsgProtocol.W_REQUEST, msg) def send_to_worker( self, worker: Worker, - command: bytes = QueueMsgProtocol.W_REQUEST, - option: bytes | None = None, + command: bytes, msg: bytes | list | None = None, ) -> None: """Send message to worker. @@ -510,50 +506,60 @@ def send_to_worker( elif not isinstance(msg, list): msg = [msg] - # Stack routing and protocol envelopes to start of message - # and routing envelope - if option is not None: - msg = [option] + msg - msg = [worker.address, b"", QueueMsgProtocol.W_WORKER, command] + msg + # ZMQProducer send frames: [address, empty, header, command, ...data] + core = [worker.address, b"", QueueMsgProtocol.W_WORKER, command] + msg = core + msg + + if command != QueueMsgProtocol.W_HEARTBEAT: + # log everything except the last frame which contains serialized data + logger.info(f"ZMQProducer send: {core}") - logger.debug("Send: {}", msg) with ZMQ_SOCKET_LOCK: try: self.socket.send_multipart(msg) except zmq.ZMQError as e: - logger.error("Failed to send message to producer. 
{}", e) + logger.error("ZMQProducer send error", exc_info=e) def _run(self) -> None: - while True: - if self._stop.is_set(): - return + try: + while True: + if self._stop.is_set(): + logger.info("ZMQProducer thread stopped") + return - for service in self.services.values(): - self.dispatch(service, None) + for service in self.services.values(): + self.dispatch(service, None) - items = None + items = None - try: - items = self.poll_workers.poll(ZMQ_POLLER_TIMEOUT_MSEC) - except Exception as e: - logger.exception("Failed to poll items: {}", e) + try: + items = self.poll_workers.poll(ZMQ_POLLER_TIMEOUT_MSEC) + except Exception as e: + logger.exception("ZMQProducer poll error", exc_info=e) - if items: - msg = self.socket.recv_multipart() + if items: + msg = self.socket.recv_multipart() + + if len(msg) < 3: + logger.error(f"ZMQProducer invalid recv: {msg}") + continue - logger.debug("Recieve: {}", msg) + # ZMQProducer recv frames: [address, empty, header, command, ...data] + (address, _, header, command, *data) = msg - address = msg.pop(0) - empty = msg.pop(0) # noqa: F841 - header = msg.pop(0) + if command != QueueMsgProtocol.W_HEARTBEAT: + # log everything except the last frame which contains serialized data + logger.info(f"ZMQProducer recv: {msg[:4]}") - if header == QueueMsgProtocol.W_WORKER: - self.process_worker(address, msg) - else: - logger.error("Invalid message header: {}", header) + if header == QueueMsgProtocol.W_WORKER: + self.process_worker(address, command, data) + else: + logger.error(f"Invalid message header: {header}") - self.send_heartbeats() - self.purge_workers() + self.send_heartbeats() + self.purge_workers() + except Exception as e: + logger.exception("ZMQProducer thread exception", exc_info=e) def require_worker(self, address: bytes) -> Worker: """Finds the worker (creates if necessary).""" @@ -564,16 +570,13 @@ def require_worker(self, address: bytes) -> Worker: self.workers[identity] = worker return worker - def process_worker(self, address: bytes, msg: list[bytes]) -> None: - command = msg.pop(0) - + def process_worker(self, address: bytes, command: bytes, data: list[bytes]) -> None: worker_ready = hexlify(address) in self.workers - worker = self.require_worker(address) if QueueMsgProtocol.W_READY == command: - service_name = msg.pop(0).decode() - syft_worker_id = msg.pop(0).decode() + service_name = data.pop(0).decode() + syft_worker_id = data.pop(0).decode() if worker_ready: # Not first command in session or Reserved service name # If worker was already present, then we disconnect it first @@ -589,18 +592,7 @@ def process_worker(self, address: bytes, msg: list[bytes]) -> None: self.services[service_name] = service if service is not None: worker.service = service - logger.info( - "New Worker service={}, id={}, uid={}", - service.name, - worker.identity, - worker.syft_worker_id, - ) - else: - logger.info( - "New Worker service=None, id={}, uid={}", - worker.identity, - worker.syft_worker_id, - ) + logger.info(f"New worker: {worker}") worker.syft_worker_id = UID(syft_worker_id) self.worker_waiting(worker) @@ -611,19 +603,18 @@ def process_worker(self, address: bytes, msg: list[bytes]) -> None: # if not already present self.worker_waiting(worker) else: - # extract the syft worker id and worker pool name from the message - # Get the corresponding worker pool and worker - # update the status to be unhealthy + logger.info(f"Got heartbeat, but worker not ready. 
{worker}") self.delete_worker(worker, True) elif QueueMsgProtocol.W_DISCONNECT == command: + logger.info(f"Removing disconnected worker: {worker}") self.delete_worker(worker, False) else: - logger.error("Invalid command: {}", command) + logger.error(f"Invalid command: {command!r}") def delete_worker(self, worker: Worker, disconnect: bool) -> None: """Deletes worker from all data structures, and deletes worker.""" if disconnect: - self.send_to_worker(worker, QueueMsgProtocol.W_DISCONNECT, None, None) + self.send_to_worker(worker, QueueMsgProtocol.W_DISCONNECT) if worker.service and worker in worker.service.waiting: worker.service.waiting.remove(worker) @@ -680,13 +671,12 @@ def reconnect_to_producer(self) -> None: self.socket.connect(self.address) self.poller.register(self.socket, zmq.POLLIN) - logger.info("Connecting Worker id={} to broker addr={}", self.id, self.address) + logger.info(f"Connecting Worker id={self.id} to broker addr={self.address}") # Register queue with the producer self.send_to_producer( QueueMsgProtocol.W_READY, - self.service_name.encode(), - [str(self.syft_worker_id).encode()], + [self.service_name.encode(), str(self.syft_worker_id).encode()], ) def post_init(self) -> None: @@ -704,7 +694,7 @@ def close(self) -> None: try: self.poller.unregister(self.socket) except Exception as e: - logger.exception("Failed to unregister worker. {}", e) + logger.exception("Failed to unregister worker.", exc_info=e) finally: if self.thread is not None: self.thread.join(timeout=THREAD_TIMEOUT_SEC) @@ -715,8 +705,7 @@ def close(self) -> None: def send_to_producer( self, - command: str, - option: bytes | None = None, + command: bytes, msg: bytes | list | None = None, ) -> None: """Send message to producer. @@ -732,23 +721,25 @@ def send_to_producer( elif not isinstance(msg, list): msg = [msg] - if option: - msg = [option] + msg + # ZMQConsumer send frames: [empty, header, command, ...data] + core = [b"", QueueMsgProtocol.W_WORKER, command] + msg = core + msg - msg = [b"", QueueMsgProtocol.W_WORKER, command] + msg - logger.debug("Send: msg={}", msg) + if command != QueueMsgProtocol.W_HEARTBEAT: + logger.info(f"ZMQ Consumer send: {core}") with ZMQ_SOCKET_LOCK: try: self.socket.send_multipart(msg) except zmq.ZMQError as e: - logger.error("Failed to send message to producer. {}", e) + logger.error("ZMQConsumer send error", exc_info=e) def _run(self) -> None: """Send reply, if any, to producer and wait for next request.""" try: while True: if self._stop.is_set(): + logger.info("ZMQConsumer thread stopped") return try: @@ -757,39 +748,38 @@ def _run(self) -> None: logger.info("Context terminated") return except Exception as e: - logger.error("Poll error={}", e) + logger.error("ZMQ poll error", exc_info=e) continue if items: - # Message format: - # [b"", "
", "", "", ""] msg = self.socket.recv_multipart() - logger.debug("Recieve: {}", msg) - # mark as alive self.set_producer_alive() if len(msg) < 3: - logger.error("Invalid message: {}", msg) + logger.error(f"ZMQConsumer invalid recv: {msg}") continue - empty = msg.pop(0) # noqa: F841 - header = msg.pop(0) # noqa: F841 + # Message frames recieved by consumer: + # [empty, header, command, ...data] + (_, _, command, *data) = msg - command = msg.pop(0) + if command != QueueMsgProtocol.W_HEARTBEAT: + # log everything except the last frame which contains serialized data + logger.info(f"ZMQConsumer recv: {msg[:-4]}") if command == QueueMsgProtocol.W_REQUEST: # Call Message Handler try: - message = msg.pop() + message = data.pop() self.associate_job(message) self.message_handler.handle_message( message=message, syft_worker_id=self.syft_worker_id, ) except Exception as e: - logger.exception("Error while handling message. {}", e) + logger.exception("Couldn't handle message", exc_info=e) finally: self.clear_job() elif command == QueueMsgProtocol.W_HEARTBEAT: @@ -797,7 +787,7 @@ def _run(self) -> None: elif command == QueueMsgProtocol.W_DISCONNECT: self.reconnect_to_producer() else: - logger.error("Invalid command: {}", command) + logger.error(f"ZMQConsumer invalid command: {command}") else: if not self.is_producer_alive(): logger.info("Producer check-alive timed out. Reconnecting.") @@ -808,12 +798,11 @@ def _run(self) -> None: except zmq.ZMQError as e: if e.errno == zmq.ETERM: - logger.info("Consumer connection terminated") + logger.info("zmq.ETERM") else: - logger.exception("Consumer error. {}", e) - raise e - - logger.info("Worker finished") + logger.exception("zmq.ZMQError", exc_info=e) + except Exception as e: + logger.exception("ZMQConsumer thread exception", exc_info=e) def set_producer_alive(self) -> None: self.producer_ping_t.reset() @@ -836,7 +825,7 @@ def associate_job(self, message: Frame) -> None: queue_item = _deserialize(message, from_bytes=True) self._set_worker_job(queue_item.job_id) except Exception as e: - logger.exception("Could not associate job. {}", e) + logger.exception("Could not associate job", exc_info=e) def clear_job(self) -> None: self._set_worker_job(None) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 882dd243ec4..d82e1207727 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -3,6 +3,7 @@ from enum import Enum import hashlib import inspect +import logging from typing import Any # third party @@ -58,6 +59,8 @@ from ..response import SyftSuccess from ..user.user import UserView +logger = logging.getLogger(__name__) + @serializable() class RequestStatus(Enum): @@ -158,7 +161,7 @@ def _run( permission=self.apply_permission_type, ) if apply: - print( + logger.debug( "ADDING PERMISSION", requesting_permission_action_obj, id_action ) action_store.add_permission(requesting_permission_action_obj) @@ -182,7 +185,7 @@ def _run( ) return Ok(SyftSuccess(message=f"{type(self)} Success")) except Exception as e: - print(f"failed to apply {type(self)}", e) + logger.error(f"failed to apply {type(self)}", exc_info=e) return Err(SyftError(message=str(e))) def apply(self, context: ChangeContext) -> Result[SyftSuccess, SyftError]: @@ -1317,7 +1320,7 @@ def _run( self.linked_obj.update_with_context(context, updated_status) return Ok(SyftSuccess(message=f"{type(self)} Success")) except Exception as e: - print(f"failed to apply {type(self)}. 
{e}") + logger.error(f"failed to apply {type(self)}", exc_info=e) return Err(SyftError(message=str(e))) def apply(self, context: ChangeContext) -> Result[SyftSuccess, SyftError]: diff --git a/packages/syft/src/syft/service/service.py b/packages/syft/src/syft/service/service.py index cda115cb8b4..c92695e2f6a 100644 --- a/packages/syft/src/syft/service/service.py +++ b/packages/syft/src/syft/service/service.py @@ -8,6 +8,7 @@ from functools import partial import inspect from inspect import Parameter +import logging from typing import Any from typing import TYPE_CHECKING @@ -43,6 +44,8 @@ from .user.user_roles import ServiceRole from .warnings import APIEndpointWarning +logger = logging.getLogger(__name__) + if TYPE_CHECKING: # relative from ..client.api import APIModule @@ -491,5 +494,5 @@ def from_api_or_context( ) return partial(service_method, node_context) else: - print("Could not get method from api or context") + logger.error("Could not get method from api or context") return None diff --git a/packages/syft/src/syft/service/settings/settings.py b/packages/syft/src/syft/service/settings/settings.py index 94adfbf307c..2db395ce9e5 100644 --- a/packages/syft/src/syft/service/settings/settings.py +++ b/packages/syft/src/syft/service/settings/settings.py @@ -1,5 +1,6 @@ # stdlib from collections.abc import Callable +import logging from typing import Any # third party @@ -29,6 +30,8 @@ from ...util.schema import DEFAULT_WELCOME_MSG from ..response import SyftInfo +logger = logging.getLogger(__name__) + @serializable() class NodeSettingsUpdateV4(PartialSyftObject): @@ -54,8 +57,8 @@ def validate_node_side_type(cls, v: str) -> type[Empty]: as information might be leaked." try: display(SyftInfo(message=msg)) - except Exception: - print(SyftInfo(message=msg)) + except Exception as e: + logger.error(msg, exc_info=e) return Empty diff --git a/packages/syft/src/syft/service/sync/diff_state.py b/packages/syft/src/syft/service/sync/diff_state.py index 24d89af2fd6..031f2240376 100644 --- a/packages/syft/src/syft/service/sync/diff_state.py +++ b/packages/syft/src/syft/service/sync/diff_state.py @@ -5,6 +5,7 @@ from dataclasses import dataclass import enum import html +import logging import operator import textwrap from typing import Any @@ -13,7 +14,6 @@ from typing import TYPE_CHECKING # third party -from loguru import logger import pandas as pd from rich import box from rich.console import Console @@ -61,6 +61,8 @@ from ..user.user import UserView from .sync_state import SyncState +logger = logging.getLogger(__name__) + if TYPE_CHECKING: # relative from .resolve_widget import PaginatedResolveWidget @@ -509,7 +511,6 @@ def _repr_html_(self) -> str: obj_repr += diff.__repr__() + "
" obj_repr = obj_repr.replace("\n", "
") - # print("New lines", res) attr_text = f"

{self.object_type} ObjectDiff:

\n{obj_repr}" return base_str + attr_text @@ -1060,7 +1061,7 @@ def stage_change(self) -> None: other_batch.decision == SyncDecision.IGNORE and other_batch.root_id in required_dependencies ): - print(f"ignoring other batch ({other_batch.root_type.__name__})") + logger.debug(f"ignoring other batch ({other_batch.root_type.__name__})") other_batch.decision = None @@ -1282,7 +1283,7 @@ def apply_previous_ignore_state( if hash(batch) == batch_hash: batch.decision = SyncDecision.IGNORE else: - print( + logger.debug( f"""A batch with type {batch.root_type.__name__} was previously ignored but has changed It will be available for review again.""" ) @@ -1409,7 +1410,7 @@ def _create_batches( # TODO: Figure out nested user codes, do we even need that? root_ids.append(diff.object_id) # type: ignore - elif ( + elif ( # type: ignore[unreachable] isinstance(diff_obj, Job) # type: ignore and diff_obj.parent_job_id is None # ignore Job objects created by TwinAPIEndpoint diff --git a/packages/syft/src/syft/service/sync/sync_service.py b/packages/syft/src/syft/service/sync/sync_service.py index db50c2a7a61..e452b8b0b8e 100644 --- a/packages/syft/src/syft/service/sync/sync_service.py +++ b/packages/syft/src/syft/service/sync/sync_service.py @@ -1,9 +1,9 @@ # stdlib from collections import defaultdict +import logging from typing import Any # third party -from loguru import logger from result import Err from result import Ok from result import Result @@ -36,6 +36,8 @@ from .sync_stash import SyncStash from .sync_state import SyncState +logger = logging.getLogger(__name__) + def get_store(context: AuthedServiceContext, item: SyncableSyftObject) -> Any: if isinstance(item, ActionObject): diff --git a/packages/syft/src/syft/service/worker/utils.py b/packages/syft/src/syft/service/worker/utils.py index c952cbe8c13..c9b930c353c 100644 --- a/packages/syft/src/syft/service/worker/utils.py +++ b/packages/syft/src/syft/service/worker/utils.py @@ -1,5 +1,6 @@ # stdlib import contextlib +import logging import os from pathlib import Path import socket @@ -34,6 +35,8 @@ from .worker_pool import WorkerOrchestrationType from .worker_pool import WorkerStatus +logger = logging.getLogger(__name__) + DEFAULT_WORKER_IMAGE_TAG = "openmined/default-worker-image-cpu:0.0.1" DEFAULT_WORKER_POOL_NAME = "default-pool" K8S_NODE_CREDS_NAME = "node-creds" @@ -261,9 +264,9 @@ def run_workers_in_threads( address=address, ) except Exception as e: - print( - "Failed to start consumer for " - f"pool={pool_name} worker={worker_name}. Error: {e}" + logger.error( + f"Failed to start consumer for pool={pool_name} worker={worker_name}", + exc_info=e, ) worker.status = WorkerStatus.STOPPED error = str(e) @@ -335,12 +338,7 @@ def create_kubernetes_pool( pool = None try: - print( - "Creating new pool " - f"name={pool_name} " - f"tag={tag} " - f"replicas={replicas}" - ) + logger.info(f"Creating new pool name={pool_name} tag={tag} replicas={replicas}") env_vars, mount_secrets = prepare_kubernetes_pool_env( runner, @@ -391,7 +389,7 @@ def scale_kubernetes_pool( return SyftError(message=f"Pool does not exist. 
name={pool_name}") try: - print(f"Scaling pool name={pool_name} to replicas={replicas}") + logger.info(f"Scaling pool name={pool_name} to replicas={replicas}") runner.scale_pool(pool_name=pool_name, replicas=replicas) except Exception as e: return SyftError(message=f"Failed to scale workers {e}") @@ -520,7 +518,7 @@ def run_containers( if not worker_image.is_built: return SyftError(message="Image must be built before running it.") - print(f"Starting workers with start_idx={start_idx} count={number}") + logger.info(f"Starting workers with start_idx={start_idx} count={number}") if orchestration == WorkerOrchestrationType.DOCKER: with contextlib.closing(docker.from_env()) as client: diff --git a/packages/syft/src/syft/service/worker/worker_pool_service.py b/packages/syft/src/syft/service/worker/worker_pool_service.py index d42645a19bb..a44cf2e2d82 100644 --- a/packages/syft/src/syft/service/worker/worker_pool_service.py +++ b/packages/syft/src/syft/service/worker/worker_pool_service.py @@ -1,4 +1,5 @@ # stdlib +import logging from typing import Any # third party @@ -45,6 +46,8 @@ from .worker_service import WorkerService from .worker_stash import WorkerStash +logger = logging.getLogger(__name__) + @serializable() class SyftWorkerPoolService(AbstractService): @@ -527,7 +530,7 @@ def scale( uid=worker.object_uid, ) if delete_result.is_err(): - print(f"Failed to delete worker: {worker.object_uid}") + logger.error(f"Failed to delete worker: {worker.object_uid}") # update worker_pool worker_pool.max_count = number diff --git a/packages/syft/src/syft/store/blob_storage/__init__.py b/packages/syft/src/syft/store/blob_storage/__init__.py index 15658ad4c8c..b9677eda95b 100644 --- a/packages/syft/src/syft/store/blob_storage/__init__.py +++ b/packages/syft/src/syft/store/blob_storage/__init__.py @@ -44,6 +44,7 @@ from collections.abc import Callable from collections.abc import Generator from io import BytesIO +import logging from typing import Any # third party @@ -74,6 +75,8 @@ from ...types.transforms import make_set_default from ...types.uid import UID +logger = logging.getLogger(__name__) + DEFAULT_TIMEOUT = 10 MAX_RETRIES = 20 @@ -138,11 +141,11 @@ def syft_iter_content( return # If successful, exit the function except requests.exceptions.RequestException as e: if attempt < max_retries: - print( + logger.debug( f"Attempt {attempt}/{max_retries} failed: {e} at byte {current_byte}. Retrying..." ) else: - print(f"Max retries reached. 
Failed with error: {e}") + logger.error(f"Max retries reached - {e}") raise diff --git a/packages/syft/src/syft/store/blob_storage/seaweedfs.py b/packages/syft/src/syft/store/blob_storage/seaweedfs.py index a63ed8a2d67..03c6f442c26 100644 --- a/packages/syft/src/syft/store/blob_storage/seaweedfs.py +++ b/packages/syft/src/syft/store/blob_storage/seaweedfs.py @@ -1,6 +1,7 @@ # stdlib from collections.abc import Generator from io import BytesIO +import logging import math from queue import Queue import threading @@ -40,6 +41,8 @@ from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...util.constants import DEFAULT_TIMEOUT +logger = logging.getLogger(__name__) + MAX_QUEUE_SIZE = 100 WRITE_EXPIRATION_TIME = 900 # seconds DEFAULT_FILE_PART_SIZE = 1024**3 # 1GB @@ -149,7 +152,7 @@ def add_chunks_to_queue( etags.append({"ETag": etag, "PartNumber": part_no}) except requests.RequestException as e: - print(e) + logger.error(f"Failed to upload file to SeaweedFS - {e}") return SyftError(message=str(e)) mark_write_complete_method = from_api_or_context( diff --git a/packages/syft/src/syft/store/document_store.py b/packages/syft/src/syft/store/document_store.py index fea96e6d456..3d69024c7d4 100644 --- a/packages/syft/src/syft/store/document_store.py +++ b/packages/syft/src/syft/store/document_store.py @@ -350,7 +350,6 @@ def store_query_keys(self, objs: Any) -> QueryKeys: def _thread_safe_cbk(self, cbk: Callable, *args: Any, **kwargs: Any) -> Any | Err: locked = self.lock.acquire(blocking=True) if not locked: - print("FAILED TO LOCK") return Err( f"Failed to acquire lock for the operation {self.lock.lock_name} ({self.lock._lock})" ) diff --git a/packages/syft/src/syft/store/locks.py b/packages/syft/src/syft/store/locks.py index 6a29f6efdfb..48ae6ca1178 100644 --- a/packages/syft/src/syft/store/locks.py +++ b/packages/syft/src/syft/store/locks.py @@ -1,5 +1,6 @@ # stdlib from collections import defaultdict +import logging import threading import time from typing import Any @@ -11,6 +12,7 @@ # relative from ..serde.serializable import serializable +logger = logging.getLogger(__name__) THREAD_FILE_LOCKS: dict[int, dict[str, int]] = defaultdict(dict) @@ -190,7 +192,7 @@ def acquire(self, blocking: bool = True) -> bool: elapsed = time.time() - start_time else: return True - print( + logger.debug( f"Timeout elapsed after {self.timeout} seconds while trying to acquiring lock." ) # third party diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 96a0b70b81f..e68b2f13710 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -4,6 +4,7 @@ # stdlib from collections import defaultdict from copy import deepcopy +import logging from pathlib import Path import sqlite3 import tempfile @@ -33,6 +34,8 @@ from .locks import NoLockingConfig from .locks import SyftLock +logger = logging.getLogger(__name__) + # here we can create a single connection per cache_key # since pytest is concurrent processes, we need to isolate each connection # by its filename and optionally the thread that its running in @@ -350,7 +353,7 @@ def __del__(self) -> None: try: self._close() except Exception as e: - print(f"Could not close connection. 
Error: {e}") + logger.error("Could not close connection", exc_info=e) @serializable() diff --git a/packages/syft/src/syft/types/grid_url.py b/packages/syft/src/syft/types/grid_url.py index 91cf53e46d7..040969c2730 100644 --- a/packages/syft/src/syft/types/grid_url.py +++ b/packages/syft/src/syft/types/grid_url.py @@ -3,6 +3,7 @@ # stdlib import copy +import logging import os import re from urllib.parse import urlparse @@ -15,6 +16,8 @@ from ..serde.serializable import serializable from ..util.util import verify_tls +logger = logging.getLogger(__name__) + @serializable(attrs=["protocol", "host_or_ip", "port", "path", "query"]) class GridURL: @@ -43,7 +46,7 @@ def from_url(cls, url: str | GridURL) -> GridURL: query=getattr(parts, "query", ""), ) except Exception as e: - print(f"Failed to convert url: {url} to GridURL. {e}") + logger.error(f"Failed to convert url: {url} to GridURL. {e}") raise e def __init__( diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index e4daf3a779f..9df3f22300c 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -10,6 +10,7 @@ from hashlib import sha256 import inspect from inspect import Signature +import logging import types from types import NoneType from types import UnionType @@ -48,6 +49,8 @@ from .syft_metaclass import PartialModelMetaclass from .uid import UID +logger = logging.getLogger(__name__) + if TYPE_CHECKING: # relative from ..client.api import SyftAPI @@ -611,8 +614,9 @@ def _syft_keys_types_dict(cls, attr_name: str) -> dict[str, type]: if isinstance(method, types.FunctionType): type_ = method.__annotations__["return"] except Exception as e: - print( - f"Failed to get attribute from key {key} type for {cls} storage. {e}" + logger.error( + f"Failed to get attribute from key {key} type for {cls} storage.", + exc_info=e, ) raise e # EmailStr seems to be lost every time the value is set even with a validator diff --git a/packages/syft/src/syft/types/uid.py b/packages/syft/src/syft/types/uid.py index b4aab67302e..cd3a0dafba5 100644 --- a/packages/syft/src/syft/types/uid.py +++ b/packages/syft/src/syft/types/uid.py @@ -5,6 +5,7 @@ from collections.abc import Callable from collections.abc import Sequence import hashlib +import logging from typing import Any import uuid from uuid import UUID as uuid_type @@ -14,8 +15,8 @@ # relative from ..serde.serializable import serializable -from ..util.logger import critical -from ..util.logger import traceback_and_raise + +logger = logging.getLogger(__name__) @serializable(attrs=["value"]) @@ -81,9 +82,8 @@ def from_string(value: str) -> UID: try: return UID(value=uuid.UUID(value)) except ValueError as e: - critical(f"Unable to convert {value} to UUID. {e}") - traceback_and_raise(e) - raise + logger.critical(f"Unable to convert {value} to UUID. 
{e}") + raise e @staticmethod def with_seed(value: str) -> UID: diff --git a/packages/syft/src/syft/util/logger.py b/packages/syft/src/syft/util/logger.py deleted file mode 100644 index d9f0611a6c6..00000000000 --- a/packages/syft/src/syft/util/logger.py +++ /dev/null @@ -1,134 +0,0 @@ -# stdlib -from collections.abc import Callable -import logging -import os -import sys -from typing import Any -from typing import NoReturn -from typing import TextIO - -# third party -from loguru import logger - -LOG_FORMAT = "[{time}][{level}][{module}]][{process.id}] {message}" - -logger.remove() -DEFAULT_SINK = "syft_{time}.log" - - -def remove() -> None: - logger.remove() - - -def add( - sink: None | str | os.PathLike | TextIO | logging.Handler = None, - level: str = "ERROR", -) -> None: - sink = DEFAULT_SINK if sink is None else sink - try: - logger.add( - sink=sink, - format=LOG_FORMAT, - enqueue=True, - colorize=False, - diagnose=True, - backtrace=True, - rotation="10 MB", - retention="1 day", - level=level, - ) - except BaseException: - logger.add( - sink=sink, - format=LOG_FORMAT, - colorize=False, - diagnose=True, - backtrace=True, - level=level, - ) - - -def start() -> None: - add(sink=sys.stderr, level="CRITICAL") - - -def stop() -> None: - logger.stop() - - -def traceback_and_raise(e: Any, verbose: bool = False) -> NoReturn: - try: - if verbose: - logger.opt(lazy=True).exception(e) - else: - logger.opt(lazy=True).critical(e) - except BaseException as ex: - logger.debug("failed to print exception", ex) - if not issubclass(type(e), Exception): - e = Exception(e) - raise e - - -def create_log_and_print_function(level: str) -> Callable: - def log_and_print(*args: Any, **kwargs: Any) -> None: - try: - method = getattr(logger.opt(lazy=True), level, None) - if "print" in kwargs and kwargs["print"] is True: - del kwargs["print"] - print(*args, **kwargs) - if "end" in kwargs: - # clean up extra end for printinga - del kwargs["end"] - - if method is not None: - method(*args, **kwargs) - else: - raise Exception(f"no method {level} on logger") - except BaseException as e: - msg = f"failed to log exception. {e}" - try: - logger.debug(msg) - - except Exception as e: - print(f"{msg}. 
{e}") - - return log_and_print - - -def traceback(*args: Any, **kwargs: Any) -> None: - # caller = inspect.getframeinfo(inspect.stack()[1][0]) - # print(f"traceback:{caller.filename}:{caller.function}:{caller.lineno}") - return create_log_and_print_function(level="exception")(*args, **kwargs) - - -def critical(*args: Any, **kwargs: Any) -> None: - # caller = inspect.getframeinfo(inspect.stack()[1][0]) - # print(f"critical:{caller.filename}:{caller.function}:{caller.lineno}:{args}") - return create_log_and_print_function(level="critical")(*args, **kwargs) - - -def error(*args: Any, **kwargs: Any) -> None: - # caller = inspect.getframeinfo(inspect.stack()[1][0]) - # print(f"error:{caller.filename}:{caller.function}:{caller.lineno}") - return create_log_and_print_function(level="error")(*args, **kwargs) - - -def warning(*args: Any, **kwargs: Any) -> None: - return create_log_and_print_function(level="warning")(*args, **kwargs) - - -def info(*args: Any, **kwargs: Any) -> None: - return create_log_and_print_function(level="info")(*args, **kwargs) - - -def debug(*args: Any) -> None: - debug_msg = " ".join([str(a) for a in args]) - return logger.debug(debug_msg) - - -def _debug(*args: Any, **kwargs: Any) -> None: - return create_log_and_print_function(level="debug")(*args, **kwargs) - - -def trace(*args: Any, **kwargs: Any) -> None: - return create_log_and_print_function(level="trace")(*args, **kwargs) diff --git a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py index ee0576cc206..f623e95b480 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py +++ b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py @@ -1,5 +1,6 @@ # stdlib import json +import logging import secrets from typing import Any @@ -7,7 +8,6 @@ from IPython.display import HTML from IPython.display import display import jinja2 -from loguru import logger # relative from ...assets import load_css @@ -16,6 +16,8 @@ from ...table import prepare_table_data from ..icons import Icon +logger = logging.getLogger(__name__) + DEFAULT_ID_WIDTH = 110 env = jinja2.Environment(loader=jinja2.PackageLoader("syft", "assets/jinja")) # nosec @@ -145,7 +147,7 @@ def build_tabulator_table( return table_html except Exception as e: - logger.debug("error building table", e) + logger.debug("error building table", exc_info=e) return None diff --git a/packages/syft/src/syft/util/table.py b/packages/syft/src/syft/util/table.py index 998e022bdbd..fc5df24578c 100644 --- a/packages/syft/src/syft/util/table.py +++ b/packages/syft/src/syft/util/table.py @@ -3,17 +3,17 @@ from collections.abc import Iterable from collections.abc import Mapping from collections.abc import Set +import logging import re from typing import Any -# third party -from loguru import logger - # relative from .notebook_ui.components.table_template import TABLE_INDEX_KEY from .notebook_ui.components.table_template import create_table_template from .util import full_name_with_qualname +logger = logging.getLogger(__name__) + def _syft_in_mro(self: Any, item: Any) -> bool: if hasattr(type(item), "mro") and type(item) != type: diff --git a/packages/syft/src/syft/util/telemetry.py b/packages/syft/src/syft/util/telemetry.py index 32a57dd0534..d03f240a1de 100644 --- a/packages/syft/src/syft/util/telemetry.py +++ b/packages/syft/src/syft/util/telemetry.py @@ -1,9 +1,12 @@ # stdlib from collections.abc import Callable +import logging import os from typing 
import Any from typing import TypeVar +logger = logging.getLogger(__name__) + def str_to_bool(bool_str: str | None) -> bool: result = False @@ -27,7 +30,6 @@ def noop(__func_or_class: T, /, *args: Any, **kwargs: Any) -> T: instrument = noop else: try: - print("OpenTelemetry Tracing enabled") service_name = os.environ.get("SERVICE_NAME", "client") jaeger_host = os.environ.get("JAEGER_HOST", "localhost") jaeger_port = int(os.environ.get("JAEGER_PORT", "14268")) @@ -74,6 +76,6 @@ def noop(__func_or_class: T, /, *args: Any, **kwargs: Any) -> T: from .trace_decorator import instrument as _instrument instrument = _instrument - except Exception: # nosec - print("Failed to import opentelemetry") + except Exception as e: + logger.error("Failed to import opentelemetry", exc_info=e) instrument = noop diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index b0affa2b1a0..bbdba2a2e60 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -10,6 +10,7 @@ import functools import hashlib from itertools import repeat +import logging import multiprocessing import multiprocessing as mp from multiprocessing import set_start_method @@ -37,11 +38,7 @@ from nacl.signing import VerifyKey import requests -# relative -from .logger import critical -from .logger import debug -from .logger import error -from .logger import traceback_and_raise +logger = logging.getLogger(__name__) DATASETS_URL = "https://raw.githubusercontent.com/OpenMined/datasets/main" PANDAS_DATA = f"{DATASETS_URL}/pandas_cookbook" @@ -57,9 +54,9 @@ def full_name_with_qualname(klass: type) -> str: if not hasattr(klass, "__module__"): return f"builtins.{get_qualname_for(klass)}" return f"{klass.__module__}.{get_qualname_for(klass)}" - except Exception: + except Exception as e: # try name as backup - print("Failed to get FQN for:", klass, type(klass)) + logger.error(f"Failed to get FQN for: {klass} {type(klass)}", exc_info=e) return full_name_with_name(klass=klass) @@ -70,7 +67,7 @@ def full_name_with_name(klass: type) -> str: return f"builtins.{get_name_for(klass)}" return f"{klass.__module__}.{get_name_for(klass)}" except Exception as e: - print("Failed to get FQN for:", klass, type(klass)) + logger.error(f"Failed to get FQN for: {klass} {type(klass)}", exc_info=e) raise e @@ -107,7 +104,7 @@ def extract_name(klass: type) -> str: return fqn.split(".")[-1] return fqn except Exception as e: - print(f"Failed to get klass name {klass}") + logger.error(f"Failed to get klass name {klass}", exc_info=e) raise e else: raise ValueError(f"Failed to match regex for klass {klass}") @@ -117,9 +114,7 @@ def validate_type(_object: object, _type: type, optional: bool = False) -> Any: if isinstance(_object, _type) or (optional and (_object is None)): return _object - traceback_and_raise( - f"Object {_object} should've been of type {_type}, not {_object}." - ) + raise Exception(f"Object {_object} should've been of type {_type}, not {_object}.") def validate_field(_object: object, _field: str) -> Any: @@ -128,7 +123,7 @@ def validate_field(_object: object, _field: str) -> Any: if object is not None: return object - traceback_and_raise(f"Object {_object} has no {_field} field set.") + raise Exception(f"Object {_object} has no {_field} field set.") def get_fully_qualified_name(obj: object) -> str: @@ -150,7 +145,7 @@ def get_fully_qualified_name(obj: object) -> str: try: fqn += "." 
+ obj.__class__.__name__ except Exception as e: - error(f"Failed to get FQN: {e}") + logger.error(f"Failed to get FQN: {e}") return fqn @@ -175,7 +170,7 @@ def key_emoji(key: object) -> str: hex_chars = bytes(key).hex()[-8:] return char_emoji(hex_chars=hex_chars) except Exception as e: - error(f"Fail to get key emoji: {e}") + logger.error(f"Fail to get key emoji: {e}") pass return "ALL" @@ -332,7 +327,7 @@ def find_available_port( sock.close() except Exception as e: - print(f"Failed to check port {port}. {e}") + logger.error(f"Failed to check port {port}. {e}") sock.close() if search is False and port_available is False: @@ -446,7 +441,7 @@ def obj2pointer_type(obj: object | None = None, fqn: str | None = None) -> type: except Exception as e: # sometimes the object doesn't have a __module__ so you need to use the type # like: collections.OrderedDict - debug( + logger.debug( f"Unable to get get_fully_qualified_name of {type(obj)} trying type. {e}" ) fqn = get_fully_qualified_name(obj=type(obj)) @@ -457,10 +452,8 @@ def obj2pointer_type(obj: object | None = None, fqn: str | None = None) -> type: try: ref = get_loaded_syft().lib_ast.query(fqn, obj_type=type(obj)) - except Exception as e: - log = f"Cannot find {type(obj)} {fqn} in lib_ast. {e}" - critical(log) - raise Exception(log) + except Exception: + raise Exception(f"Cannot find {type(obj)} {fqn} in lib_ast.") return ref.pointer_type diff --git a/ruff.toml b/ruff.toml index 3dccdf65b91..bdf2c46b9cf 100644 --- a/ruff.toml +++ b/ruff.toml @@ -24,6 +24,7 @@ ignore = [ [lint.per-file-ignores] "*.ipynb" = ["E402"] +"__init__.py" = ["F401"] [lint.pycodestyle] max-line-length = 120 From 38ac33f9e992faad70d87b6523ac6fdc5790d840 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Wed, 19 Jun 2024 18:47:07 +0200 Subject: [PATCH 200/313] db fixes --- packages/syft/src/syft/node/node.py | 4 +- .../syft/src/syft/service/job/job_stash.py | 27 +------------- packages/syft/src/syft/service/queue/queue.py | 8 ++-- .../syft/src/syft/store/document_store.py | 37 +++++++++++++++++-- .../src/syft/store/sqlite_document_store.py | 5 +++ 5 files changed, 46 insertions(+), 35 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index dc7f5881dde..4345853ba20 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -1446,7 +1446,9 @@ def add_queueitem_to_queue( ) # 🟡 TODO 36: Needs distributed lock - self.job_stash.set(credentials, job) + job_res = self.job_stash.set(credentials, job) + if job_res.is_err(): + return SyftError(message=f"{job_res.err()}") self.queue_stash.set_placeholder(credentials, queue_item) log_service = self.get_service("logservice") diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 9b9a1c623f7..5f31e3c37f6 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -23,7 +23,7 @@ from ...serde.serializable import serializable from ...service.context import AuthedServiceContext from ...service.worker.worker_pool import SyftWorker -from ...store.document_store import BaseStash +from ...store.document_store import BaseUIDStoreStash from ...store.document_store import DocumentStore from ...store.document_store import PartitionKey from ...store.document_store import PartitionSettings @@ -820,7 +820,7 @@ def from_job( @instrument @serializable() -class JobStash(BaseStash): +class JobStash(BaseUIDStoreStash): object_type = Job settings: 
PartitionSettings = PartitionSettings( name=Job.__canonical_name__, object_type=Job @@ -863,29 +863,6 @@ def get_by_result_id( else: return Ok(res[0]) - def set_placeholder( - self, - credentials: SyftVerifyKey, - item: Job, - add_permissions: list[ActionObjectPermission] | None = None, - ) -> Result[Job, str]: - # 🟡 TODO 36: Needs distributed lock - if not item.resolved: - exists = self.get_by_uid(credentials, item.id) - if exists.is_ok() and exists.ok() is None: - valid = self.check_type(item, self.object_type) - if valid.is_err(): - return SyftError(message=valid.err()) - return super().set(credentials, item, add_permissions) - return item - - def get_by_uid( - self, credentials: SyftVerifyKey, uid: UID - ) -> Result[Job | None, str]: - qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) - item = self.query_one(credentials=credentials, qks=qks) - return item - def get_by_parent_id( self, credentials: SyftVerifyKey, uid: UID ) -> Result[Job | None, str]: diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index 968e4b7c975..6bfb7bd6f89 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -173,8 +173,6 @@ def handle_message_multiprocessing( migrate=False, ) - job_item = worker.job_stash.get_by_uid(credentials, queue_item.job_id).ok() - # Set monitor thread for this job. monitor_thread = MonitorThread(queue_item, worker, credentials) monitor_thread.start() @@ -238,9 +236,9 @@ def handle_message_multiprocessing( queue_item.status = status # get new job item to get latest iter status - job_item = worker.job_stash.get_by_uid(credentials, job_item.id).ok() - - # if result.is_ok(): + job_item = worker.job_stash.get_by_uid(credentials, queue_item.job_id).ok() + if job_item is None: + raise Exception(f"Job {queue_item.job_id} not found!") job_item.node_uid = worker.id job_item.result = result diff --git a/packages/syft/src/syft/store/document_store.py b/packages/syft/src/syft/store/document_store.py index fea96e6d456..2f7e3230f90 100644 --- a/packages/syft/src/syft/store/document_store.py +++ b/packages/syft/src/syft/store/document_store.py @@ -3,6 +3,7 @@ # stdlib from collections.abc import Callable +import threading import types import typing from typing import Any @@ -640,7 +641,12 @@ def set( add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[BaseStash.object_type, str]: - return self.partition.set( + if type(obj).__name__ == "Job": + print( + f"START Setting Job {obj.id}, thread {threading.current_thread().ident}" + ) + + res = self.partition.set( credentials=credentials, obj=obj, ignore_duplicates=ignore_duplicates, @@ -648,6 +654,13 @@ def set( add_storage_permission=add_storage_permission, ) + if type(obj).__name__ == "Job": + print( + f"END Setting Job {obj.id}, thread {threading.current_thread().ident}" + ) + + return res + def query_all( self, credentials: SyftVerifyKey, @@ -744,10 +757,22 @@ def update( obj: BaseStash.object_type, has_permission: bool = False, ) -> Result[BaseStash.object_type, str]: + if type(obj).__name__ == "Job": + print( + f"START Updating Job {obj.id}, thread {threading.current_thread().ident}" + ) qk = self.partition.store_query_key(obj) - return self.partition.update( + res = self.partition.update( credentials=credentials, qk=qk, obj=obj, has_permission=has_permission ) + if type(obj).__name__ == "Job": + print( + f"END Updating Job {obj.id}, thread {threading.current_thread().ident}, res: {res}, obj: {obj}" + ) + 
qks = QueryKeys(qks=[UIDPartitionKey.with_obj(obj.id)]) + r = self.query_one(credentials=credentials, qks=qks) + print(f"Job {obj.id} found: {r}") + return res @instrument @@ -764,8 +789,12 @@ def delete_by_uid( def get_by_uid( self, credentials: SyftVerifyKey, uid: UID ) -> Result[BaseUIDStoreStash.object_type | None, str]: - qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) - return self.query_one(credentials=credentials, qks=qks) + res = self.partition.get(credentials=credentials, uid=uid) + + # NOTE Return Ok(None) when no results are found for backwards compatibility + if res.is_err(): + return Ok(None) + return res def set( self, diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 96a0b70b81f..30d2e0efd19 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -165,7 +165,12 @@ def _close(self) -> None: if REF_COUNTS[cache_key(self.db_filename)] <= 0: # once you close it seems like other object references can't re-use the # same connection + self.db.close() + db_key = cache_key(self.db_filename) + if db_key in SQLITE_CONNECTION_POOL_CUR: + # NOTE if we don't remove the cursor, the cursor cache_key can clash with a future thread id + del SQLITE_CONNECTION_POOL_CUR[db_key] del SQLITE_CONNECTION_POOL_DB[cache_key(self.db_filename)] else: # don't close yet because another SQLiteBackingStore is probably still open From 8abbacc8e6423d544e3488f9e98a1aef17c37e0d Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Wed, 19 Jun 2024 19:04:14 +0200 Subject: [PATCH 201/313] add notebook --- notebooks/auto_sync.ipynb | 3584 +++++++++++++++++++++++++++++++++++++ 1 file changed, 3584 insertions(+) create mode 100644 notebooks/auto_sync.ipynb diff --git a/notebooks/auto_sync.ipynb b/notebooks/auto_sync.ipynb new file mode 100644 index 00000000000..f69f49e0e64 --- /dev/null +++ b/notebooks/auto_sync.ipynb @@ -0,0 +1,3584 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "2e4548e0-20ce-472d-aa0f-e1c29952b694", + "metadata": {}, + "outputs": [], + "source": [ + "import time\n", + "\n", + "import syft as sy\n", + "from syft.client.domain_client import DomainClient\n", + "from syft.client.syncing import compare_clients\n", + "from syft.service.code.user_code import UserCode\n", + "from syft.service.job.job_stash import Job, JobStatus\n", + "from syft.service.request.request import Request\n", + "from syft.service.sync.diff_state import ObjectDiffBatch\n", + "\n", + "\n", + "def is_request_to_sync(batch: ObjectDiffBatch) -> bool:\n", + " # True if this is a new low-side request\n", + " # TODO add condition for sql requests/usercodes\n", + " low_request = batch.root.low_obj\n", + " return isinstance(low_request, Request) and batch.status == \"NEW\"\n", + "\n", + "\n", + "def is_job_to_sync(batch: ObjectDiffBatch):\n", + " # True if this is a new high-side job that is either COMPLETED or ERRORED\n", + " if batch.status != \"NEW\":\n", + " return False\n", + " if not isinstance(batch.root.high_obj, Job):\n", + " return False\n", + " job = batch.root.high_obj\n", + " return job.status in (JobStatus.ERRORED, JobStatus.COMPLETED)\n", + "\n", + "\n", + "def sync_new_requests(\n", + " client_low: DomainClient,\n", + " client_high: DomainClient,\n", + ") -> dict[sy.UID, sy.SyftSuccess | sy.SyftError] | sy.SyftError:\n", + " sync_request_results = {}\n", + " diff = compare_clients(\n", + " from_client=client_low, to_client=client_high, 
include_types=[\"request\"]\n", + " )\n", + " if isinstance(diff, sy.SyftError):\n", + " print(diff)\n", + " return sync_request_results\n", + " for batch in diff.batches:\n", + " if is_request_to_sync(batch):\n", + " request_id = batch.root.low_obj.id\n", + " w = batch.resolve()\n", + " result = w.click_sync()\n", + " sync_request_results[request_id] = result\n", + " return sync_request_results\n", + "\n", + "\n", + "def execute_requests(\n", + " client_high: DomainClient, request_ids: list[sy.UID]\n", + ") -> dict[sy.UID, Job]:\n", + " jobs_by_request_id = {}\n", + " for request_id in request_ids:\n", + " request = client_high.requests.get_by_uid(request_id)\n", + " if not isinstance(request, Request):\n", + " continue\n", + "\n", + " code = request.code\n", + " if not isinstance(code, UserCode):\n", + " continue\n", + "\n", + " func_name = request.code.service_func_name\n", + " api_func = getattr(client_high.code, func_name, None)\n", + " if api_func is None:\n", + " continue\n", + "\n", + " job = api_func(blocking=False)\n", + " jobs_by_request_id[request_id] = job\n", + " # sleep to prevent SQLite connection pool issues\n", + " time.sleep(1)\n", + "\n", + " return jobs_by_request_id\n", + "\n", + "\n", + "def sync_and_execute_new_requests(\n", + " client_low: DomainClient, client_high: DomainClient\n", + ") -> None:\n", + " sync_results = sync_new_requests(client_low, client_high)\n", + " if isinstance(sync_results, sy.SyftError):\n", + " print(sync_results)\n", + " return\n", + "\n", + " request_ids = [\n", + " uid for uid, res in sync_results.items() if isinstance(res, sy.SyftSuccess)\n", + " ]\n", + " print(f\"Synced {len(request_ids)} new requests\")\n", + "\n", + " jobs_by_request = execute_requests(client_high, request_ids)\n", + " print(f\"Started {len(jobs_by_request)} new jobs\")\n", + "\n", + "\n", + "def sync_finished_jobs(\n", + " client_low: DomainClient,\n", + " client_high: DomainClient,\n", + ") -> dict[sy.UID, sy.SyftError | sy.SyftSuccess] | sy.SyftError:\n", + " sync_job_results = {}\n", + " diff = compare_clients(\n", + " from_client=client_high, to_client=client_low, include_types=[\"job\"]\n", + " )\n", + " if isinstance(diff, sy.SyftError):\n", + " print(diff)\n", + " return diff\n", + "\n", + " for batch in diff.batches:\n", + " if is_job_to_sync(batch):\n", + " batch_id = batch.root.high_obj.id\n", + " w = batch.resolve()\n", + " share_result = w.click_share_all_private_data()\n", + " if isinstance(share_result, sy.SyftError):\n", + " sync_job_results[batch_id] = share_result\n", + " continue\n", + " sync_result = w.click_sync()\n", + " sync_job_results[batch_id] = sync_result\n", + "\n", + " print(f\"Sharing {len(sync_job_results)} new results\")\n", + " return sync_job_results\n", + "\n", + "\n", + "def auto_sync(client_low: DomainClient, client_high: DomainClient) -> None:\n", + " print(\"Starting auto sync\")\n", + " sync_and_execute_new_requests(client_low, client_high)\n", + " sync_finished_jobs(client_low, client_high)\n", + " print(\"Finished auto sync\")\n", + "\n", + "\n", + "def auto_sync_loop(\n", + " client_low: DomainClient, client_high: DomainClient, sleep_seconds: int = 60\n", + ") -> None:\n", + " while True:\n", + " auto_sync(client_low, client_high)\n", + " time.sleep(sleep_seconds)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "197db367-bc49-4a41-ba94-756e8b8b4bf4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Staging Protocol Changes...\n", + "Document Store's 
SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/8fb2b26c3b5d4db2a0cb775fe2a3d825/db/8fb2b26c3b5d4db2a0cb775fe2a3d825.sqlite\n", + "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/8fb2b26c3b5d4db2a0cb775fe2a3d825/db/8fb2b26c3b5d4db2a0cb775fe2a3d825.sqlite\n", + "Creating default worker image with tag='local-dev'\n", + "Setting up worker poolname=default-pool workers=0 image_uid=ecdf0b0b455f423a9c5c84b1f51be2b2 in_memory=True\n", + "Created default worker pool.\n", + "Data Migrated to latest version !!!\n", + "Staging Protocol Changes...\n", + "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", + "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", + "Creating default worker image with tag='local-dev'\n", + "Setting up worker poolname=default-pool workers=4 image_uid=7e1a9ab048964c71bda02800733738e6 in_memory=True\n", + "Created default worker pool.\n", + "Data Migrated to latest version !!!\n", + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + } + ], + "source": [ + "low_side = sy.orchestra.launch(\n", + " name=\"low-side\",\n", + " node_side_type=\"low\",\n", + " local_db=True,\n", + " reset=True,\n", + " dev_mode=True,\n", + ")\n", + "\n", + "high_side = sy.orchestra.launch(\n", + " name=\"high-side\",\n", + " node_side_type=\"high\",\n", + " local_db=True,\n", + " reset=True,\n", + " n_consumers=4,\n", + " create_producer=True,\n", + " dev_mode=True,\n", + ")\n", + "\n", + "client_high = high_side.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "client_low = low_side.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "client_low.register(\n", + " email=\"newuser@openmined.org\", name=\"John Doe\", password=\"pw\", password_verify=\"pw\"\n", + ")\n", + "client_low_ds = low_side.login(email=\"newuser@openmined.org\", password=\"pw\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "ba5648cc-db15-4a07-bbbe-76bef5b270c8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "def make_request(client):\n", + " existing_requests = client.requests.get_all()\n", + "\n", + " @sy.syft_function_single_use()\n", + " def func():\n", + " return 10\n", + "\n", + " func.func_name = f\"query_{len(existing_requests)}\"\n", + " func.code = func.code.replace(\"def func(\", f\"def {func.func_name}(\")\n", + "\n", + " res = client.code.request_code_execution(func)\n", + " return res\n", + "\n", + "\n", + "for _ in range(5):\n", + " make_request(client_low_ds)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "69d53f0f-0c49-460e-bf13-e3f53415193a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Decision: Syncing 2 objects\n", + "Decision: Syncing 2 objects\n", + "Decision: Syncing 2 objects\n", + "Decision: Syncing 2 objects\n", + "Decision: Syncing 2 objects\n", + "Synced 5 new requests\n", + "START Setting Job 0588393eb0bc4e74a788cc48780a60e2, thread 8023429120\n", + "END Setting Job 0588393eb0bc4e74a788cc48780a60e2, thread 8023429120\n", + "START Updating Job 0588393eb0bc4e74a788cc48780a60e2, thread 11293306880\n", + "END Updating Job 0588393eb0bc4e74a788cc48780a60e2, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 0588393eb0bc4e74a788cc48780a60e2 found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 0588393eb0bc4e74a788cc48780a60e2, thread 11633979392\n", + "END Updating Job 0588393eb0bc4e74a788cc48780a60e2, thread 11633979392, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 0588393eb0bc4e74a788cc48780a60e2 found: Ok(syft.service.job.job_stash.Job)\n", + "START Setting Job c586ffdd174d441eb7c1ca23633629b1, thread 8023429120\n", + "END Setting Job c586ffdd174d441eb7c1ca23633629b1, thread 8023429120\n", + "START Setting Job f45877b68917412bb85b806e179871ed, thread 8023429120\n", + "END Setting Job f45877b68917412bb85b806e179871ed, thread 8023429120\n", + "START Updating Job c586ffdd174d441eb7c1ca23633629b1, thread 11275333632\n", + "END Updating Job c586ffdd174d441eb7c1ca23633629b1, thread 11275333632, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job c586ffdd174d441eb7c1ca23633629b1 found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job f45877b68917412bb85b806e179871ed, thread 11311280128\n", + "END Updating Job f45877b68917412bb85b806e179871ed, thread 11311280128, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job f45877b68917412bb85b806e179871ed found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job c586ffdd174d441eb7c1ca23633629b1, thread 11668025344\n", + "END Updating Job c586ffdd174d441eb7c1ca23633629b1, thread 11668025344, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job c586ffdd174d441eb7c1ca23633629b1 found: Ok(syft.service.job.job_stash.Job)\n", + "START Setting Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 8023429120\n", + "END Setting Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 8023429120\n", + "START Updating Job f45877b68917412bb85b806e179871ed, thread 11827965952\n", + "END Updating Job f45877b68917412bb85b806e179871ed, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job f45877b68917412bb85b806e179871ed found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 11511902208\n", + "END Updating Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 11511902208, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job cc2aaa7f2c7542a69e3fac248d5acd8d found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 11827965952\n", + "END Updating Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: 
syft.service.job.job_stash.Job\n", + "Job cc2aaa7f2c7542a69e3fac248d5acd8d found: Ok(syft.service.job.job_stash.Job)\n", + "START Setting Job 0537c2e924334bedbd2b641625905cc4, thread 8023429120\n", + "END Setting Job 0537c2e924334bedbd2b641625905cc4, thread 8023429120\n", + "START Updating Job 0537c2e924334bedbd2b641625905cc4, thread 11293306880\n", + "END Updating Job 0537c2e924334bedbd2b641625905cc4, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 0537c2e924334bedbd2b641625905cc4 found: Ok(syft.service.job.job_stash.Job)\n", + "Started 5 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "START Updating Job 0537c2e924334bedbd2b641625905cc4, thread 11639091200\n", + "END Updating Job 0537c2e924334bedbd2b641625905cc4, thread 11639091200, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 0537c2e924334bedbd2b641625905cc4 found: Ok(syft.service.job.job_stash.Job)\n", + "Decision: Syncing 5 objects\n", + "START Setting Job f45877b68917412bb85b806e179871ed, thread 8023429120\n", + "END Setting Job f45877b68917412bb85b806e179871ed, thread 8023429120\n", + "Decision: Syncing 5 objects\n", + "START Setting Job 0588393eb0bc4e74a788cc48780a60e2, thread 8023429120\n", + "END Setting Job 0588393eb0bc4e74a788cc48780a60e2, thread 8023429120\n", + "Decision: Syncing 5 objects\n", + "START Setting Job c586ffdd174d441eb7c1ca23633629b1, thread 8023429120\n", + "END Setting Job c586ffdd174d441eb7c1ca23633629b1, thread 8023429120\n", + "Decision: Syncing 5 objects\n", + "START Setting Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 8023429120\n", + "END Setting Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 8023429120\n", + "Sharing 4 new results\n", + "Finished auto sync\n" + ] + } + ], + "source": [ + "auto_sync(client_low, client_high)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "c6a426e8-314a-4a44-8bbe-47f8f8663835", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "START Setting Job 504dfa7a0309408dad11188df0a267e9, thread 8023429120\n", + "END Setting Job 504dfa7a0309408dad11188df0a267e9, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job c9d754572ff14cc285c01b4b4e8bb86e, thread 8023429120\n", + "END Setting Job c9d754572ff14cc285c01b4b4e8bb86e, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job 449c70aaecc4479f92835cf633985815, thread 8023429120\n", + "END Setting Job 449c70aaecc4479f92835cf633985815, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job da0238a7f7794ef79bc49b987cf2f22d, thread 8023429120\n", + "END Setting Job da0238a7f7794ef79bc49b987cf2f22d, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job 099e40c538e44232969b439589226e10, thread 8023429120\n", + "END Setting Job 099e40c538e44232969b439589226e10, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job 011a06deaf05409983cc924cfc13f8fe, thread 8023429120\n", + "END Setting Job 011a06deaf05409983cc924cfc13f8fe, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job dcfa4476caa44186a2553d40e4a7bead, thread 8023429120\n", + "END Setting Job dcfa4476caa44186a2553d40e4a7bead, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job 15cd03cde93d4c92b6347a452cde32ac, thread 8023429120\n", + "END Setting Job 15cd03cde93d4c92b6347a452cde32ac, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job e05894266e6445259f7515e8441751a7, thread 8023429120\n", + "END Setting Job e05894266e6445259f7515e8441751a7, thread 8023429120\n", + "syft.service.job.job_stash.Job\n", + "START Setting Job 3da0ad0017134db490b0b3f24d78e56f, thread 8023429120\n", + "END Setting Job 3da0ad0017134db490b0b3f24d78e56f, thread 8023429120\n", + 
"syft.service.job.job_stash.Job\n", + "START Updating Job da0238a7f7794ef79bc49b987cf2f22d, thread 11311280128\n", + "END Updating Job da0238a7f7794ef79bc49b987cf2f22d, thread 11311280128, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job da0238a7f7794ef79bc49b987cf2f22d found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 504dfa7a0309408dad11188df0a267e9, thread 11511902208\n", + "START Updating Job 099e40c538e44232969b439589226e10, thread 11293306880\n", + "START Updating Job dcfa4476caa44186a2553d40e4a7bead, thread 11275333632\n", + "END Updating Job 099e40c538e44232969b439589226e10, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 099e40c538e44232969b439589226e10 found: Ok(None)\n", + "END Updating Job 504dfa7a0309408dad11188df0a267e9, thread 11511902208, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 504dfa7a0309408dad11188df0a267e9 found: Ok(syft.service.job.job_stash.Job)\n", + "END Updating Job dcfa4476caa44186a2553d40e4a7bead, thread 11275333632, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job dcfa4476caa44186a2553d40e4a7bead found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job da0238a7f7794ef79bc49b987cf2f22d, thread 11827965952\n", + "START Updating Job 504dfa7a0309408dad11188df0a267e9, thread 11878445056\n", + "START Updating Job 099e40c538e44232969b439589226e10, thread 11861618688\n", + "END Updating Job da0238a7f7794ef79bc49b987cf2f22d, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job da0238a7f7794ef79bc49b987cf2f22d found: Ok(syft.service.job.job_stash.Job)\n", + "END Updating Job 099e40c538e44232969b439589226e10, thread 11861618688, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 099e40c538e44232969b439589226e10 found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job dcfa4476caa44186a2553d40e4a7bead, thread 11895271424\n", + "END Updating Job 504dfa7a0309408dad11188df0a267e9, thread 11878445056, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 504dfa7a0309408dad11188df0a267e9 found: Ok(syft.service.job.job_stash.Job)\n", + "END Updating Job dcfa4476caa44186a2553d40e4a7bead, thread 11895271424, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job dcfa4476caa44186a2553d40e4a7bead found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 011a06deaf05409983cc924cfc13f8fe, thread 11311280128\n", + "END Updating Job 011a06deaf05409983cc924cfc13f8fe, thread 11311280128, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 011a06deaf05409983cc924cfc13f8fe found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 15cd03cde93d4c92b6347a452cde32ac, thread 11511902208\n", + "START Updating Job 449c70aaecc4479f92835cf633985815, thread 11293306880\n", + "END Updating Job 15cd03cde93d4c92b6347a452cde32ac, thread 11511902208, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 15cd03cde93d4c92b6347a452cde32ac found: Ok(syft.service.job.job_stash.Job)\n", + "END Updating Job 449c70aaecc4479f92835cf633985815, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 449c70aaecc4479f92835cf633985815 found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 
c9d754572ff14cc285c01b4b4e8bb86e, thread 11275333632\n", + "END Updating Job c9d754572ff14cc285c01b4b4e8bb86e, thread 11275333632, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job c9d754572ff14cc285c01b4b4e8bb86e found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 011a06deaf05409983cc924cfc13f8fe, thread 11827965952\n", + "END Updating Job 011a06deaf05409983cc924cfc13f8fe, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 011a06deaf05409983cc924cfc13f8fe found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 15cd03cde93d4c92b6347a452cde32ac, thread 11861618688\n", + "START Updating Job 449c70aaecc4479f92835cf633985815, thread 11878445056\n", + "END Updating Job 15cd03cde93d4c92b6347a452cde32ac, thread 11861618688, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 15cd03cde93d4c92b6347a452cde32ac found: Ok(syft.service.job.job_stash.Job)\n", + "END Updating Job 449c70aaecc4479f92835cf633985815, thread 11878445056, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 449c70aaecc4479f92835cf633985815 found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job e05894266e6445259f7515e8441751a7, thread 11311280128\n", + "START Updating Job c9d754572ff14cc285c01b4b4e8bb86e, thread 11895271424\n", + "END Updating Job e05894266e6445259f7515e8441751a7, thread 11311280128, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job e05894266e6445259f7515e8441751a7 found: Ok(syft.service.job.job_stash.Job)\n", + "END Updating Job c9d754572ff14cc285c01b4b4e8bb86e, thread 11895271424, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job c9d754572ff14cc285c01b4b4e8bb86e found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 3da0ad0017134db490b0b3f24d78e56f, thread 11293306880\n", + "END Updating Job 3da0ad0017134db490b0b3f24d78e56f, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 3da0ad0017134db490b0b3f24d78e56f found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job e05894266e6445259f7515e8441751a7, thread 11827965952\n", + "END Updating Job e05894266e6445259f7515e8441751a7, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job e05894266e6445259f7515e8441751a7 found: Ok(syft.service.job.job_stash.Job)\n", + "START Updating Job 3da0ad0017134db490b0b3f24d78e56f, thread 11861618688\n", + "END Updating Job 3da0ad0017134db490b0b3f24d78e56f, thread 11861618688, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", + "Job 3da0ad0017134db490b0b3f24d78e56f found: Ok(syft.service.job.job_stash.Job)\n" + ] + } + ], + "source": [ + "for _ in range(10):\n", + " j = client_high.code.query_0(blocking=False)\n", + " print(j)" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "2eb8c549-ddb8-4970-bf36-2c6bb77b3eb4", + "metadata": {}, + "outputs": [ + { + "ename": "TypeError", + "evalue": "BaseStash.query_one() got multiple values for argument 'credentials'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[30], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m 
\u001b[43mhigh_side\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpython_node\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mjob_stash\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery_one\u001b[49m\u001b[43m(\u001b[49m\u001b[43msy\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mUID\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m72a80389d09043e087aa5be880df38e8\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mclient_high\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mverify_key\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mTypeError\u001b[0m: BaseStash.query_one() got multiple values for argument 'credentials'" + ] + } + ], + "source": [ + "high_side.python_node.job_stash.query_one(sy.UID(\"72a80389d09043e087aa5be880df38e8\"), credentials=client_high.verify_key)" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "id": "e96839aa-dfcc-4933-967a-7e32cac1b9ef", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Ok(None)" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from syft.store.document_store import QueryKeys, UIDPartitionKey\n", + "\n", + "job_stash = high_side.python_node.job_stash\n", + "credentials = client_high.verify_key\n", + "uid = sy.UID(\"72a80389d09043e087aa5be880df38e8\")\n", + "qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)])\n", + "r = job_stash.query_one(credentials=credentials, qks=qks)\n", + "\n", + "r" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "2b3d4a11-a50d-41d2-acc9-cd171b1e757a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{: ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : ,\n", + " : }" + ] + }, + "execution_count": 59, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "job_stash.partition.unique_keys[\"id\"]" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "id": "d3446d13-35f0-46c9-8914-414095998c49", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> \u001b[0;32m/Users/eelco/.pyenv/versions/3.10.13/lib/python3.10/uuid.py\u001b[0m(177)\u001b[0;36m__init__\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 175 \u001b[0;31m \u001b[0mhex\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhex\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'{}'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreplace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'-'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m''\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 176 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhex\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;36m32\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m--> 177 \u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'badly formed hexadecimal UUID string'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 178 \u001b[0;31m \u001b[0mint\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mint_\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m16\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 179 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mbytes_le\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n" + ] + }, + { + "name": "stdin", + "output_type": "stream", + "text": [ + "ipdb> value\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "*** NameError: name 'value' is not defined\n" + ] + }, + { + "name": "stdin", + "output_type": "stream", + "text": [ + "ipdb> u\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> \u001b[0;32m/Users/eelco/dev/PySyft/packages/syft/src/syft/types/uid.py\u001b[0m(71)\u001b[0;36m__init__\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 69 \u001b[0;31m \u001b[0;31m# if value is not set - create a novel and unique ID.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 70 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m---> 71 \u001b[0;31m \u001b[0mvalue\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0muuid\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mUUID\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mversion\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 72 \u001b[0;31m \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbytes\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m 73 \u001b[0;31m \u001b[0mvalue\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0muuid\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mUUID\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbytes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mversion\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n" + ] + }, + { + "name": "stdin", + "output_type": "stream", + "text": [ + "ipdb> value\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "'id'\n" + ] + }, + { + "name": "stdin", + "output_type": "stream", + "text": [ + "ipdb> q\n" + ] + } + ], + "source": [ + "%debug" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "id": "7b2fce32-ec29-4b31-b21b-4255435f3a81", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 43, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "job_stash.partition.matches_unique_cks(qks.all[0].partition_key)" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "f7c560cd-a392-4410-8d2f-7a0f83097bb5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\u001b[0;31mSignature:\u001b[0m\n", + 
"\u001b[0mhigh_side\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpython_node\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjob_stash\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mquery_one\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m \u001b[0mcredentials\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m'SyftVerifyKey'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m \u001b[0mqks\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m'QueryKey | QueryKeys'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m \u001b[0morder_by\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m'PartitionKey | None'\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", + "\u001b[0;34m\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0;34m'Result[BaseStash.object_type | None, str]'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mDocstring:\u001b[0m \n", + "\u001b[0;31mFile:\u001b[0m ~/dev/PySyft/packages/syft/src/syft/store/document_store.py\n", + "\u001b[0;31mType:\u001b[0m method" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "high_side.python_node.job_stash.query_one?" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "b6f683f0-a333-40a7-93b4-47b12b51f261", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
high-side/jobs/
\n", + "
\n", + "
\n", + "
\n", + "
\n", + " \n", + " JOB\n", + "
\n", + "\n", + " query_0\n", + "
\n", + " \n", + " \n", + "
\n", + " \n", + " #72a80389d09043e087aa5be880df38e8\n", + " \n", + " \n", + " \n", + "\n", + "
\n", + " \n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " UserCode:\n", + " query_0\n", + "
\n", + "
\n", + " Status:\n", + " Processing\n", + "
\n", + "
\n", + " \n", + " Started At:\n", + " 2024-06-19 15:59:41.34862 by Jane Doe info@openmined.org\n", + "
\n", + "
\n", + " \n", + " Updated At:\n", + " 2024-06-19 1\n", + "
\n", + " \n", + "
\n", + " \n", + " Worker Pool:\n", + " default-pool-3 on worker \n", + " \n", + "
\n", + " \n", + " #default-pool\n", + " \n", + " \n", + " \n", + "\n", + "
\n", + " \n", + "
\n", + " \n", + "
\n", + " Subjobs:\n", + " 0\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " \n", + " \n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " syft.service.action.action_data_empty.ObjectNotReady\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n",
+       "        Message\n",
+       "\n",
+       "    
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + "\n" + ], + "text/markdown": [ + "```python\n", + "class Job:\n", + " id: UID = 72a80389d09043e087aa5be880df38e8\n", + " status: processing\n", + " has_parent: False\n", + " result: syft.service.action.action_data_empty.ObjectNotReady\n", + " logs:\n", + "\n", + "0 \n", + " \n", + "```" + ], + "text/plain": [ + "syft.service.job.job_stash.Job" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p.data[sy.UID(\"72a80389d09043e087aa5be880df38e8\")]" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "adfcbb52-d8d1-4bda-9aab-d1218b14ed5b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
high-side/jobs/
\n", + "
\n", + "
\n", + "
\n", + "
\n", + " \n", + " JOB\n", + "
\n", + "\n", + " query_0\n", + "
\n", + " \n", + " \n", + "
\n", + " \n", + " #72a80389d09043e087aa5be880df38e8\n", + " \n", + " \n", + " \n", + "\n", + "
\n", + " \n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " UserCode:\n", + " query_0\n", + "
\n", + "
\n", + " Status:\n", + " Processing\n", + "
\n", + "
\n", + " \n", + " Started At:\n", + " 2024-06-19 15:59:41.34862 by Jane Doe info@openmined.org\n", + "
\n", + "
\n", + " \n", + " Updated At:\n", + " 2024-06-19 1\n", + "
\n", + " \n", + "
\n", + " \n", + " Worker Pool:\n", + " default-pool-3 on worker \n", + " \n", + "
\n", + " \n", + " #default-pool\n", + " \n", + " \n", + " \n", + "\n", + "
\n", + " \n", + "
\n", + " \n", + "
\n", + " Subjobs:\n", + " 0\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " \n", + " \n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " syft.service.action.action_data_empty.ObjectNotReady\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n",
+       "        Message\n",
+       "\n",
+       "    
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + "\n" + ], + "text/markdown": [ + "```python\n", + "class Job:\n", + " id: UID = 72a80389d09043e087aa5be880df38e8\n", + " status: processing\n", + " has_parent: False\n", + " result: syft.service.action.action_data_empty.ObjectNotReady\n", + " logs:\n", + "\n", + "0 \n", + " \n", + "```" + ], + "text/plain": [ + "syft.service.job.job_stash.Job" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p._get(sy.UID(\"72a80389d09043e087aa5be880df38e8\"), credentials=client_high.verify_key).ok()" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "c98ae422-6997-4e30-aedd-435bb226fac6", + "metadata": {}, + "outputs": [ + { + "ename": "AttributeError", + "evalue": "'SQLiteStorePartition' object has no attribute 'query_one'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[26], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery_one\u001b[49m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'SQLiteStorePartition' object has no attribute 'query_one'" + ] + } + ], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "bc107bb2-5f1c-4cd4-8a21-2d87b838257d", + "metadata": {}, + "outputs": [], + "source": [ + "client_high.services.job.get(sy.UID(\"72a80389d09043e087aa5be880df38e8\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "cef90608-9470-468d-8f14-3d8304b716ca", + "metadata": {}, + "outputs": [], + "source": [ + "client_high.services.job.get(sy.UID(\"830b9e1fd2bf4bf9b526264e3468f97c\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "86888bee-24c1-40fe-97a7-98e20a0a5783", + "metadata": {}, + "outputs": [], + "source": [ + "import sqlite3\n", + "\n", + "conn = sqlite3.connect(database=\"x\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "8724a8dc-14b0-40ac-9abf-26b9095b7a2c", + "metadata": {}, + "outputs": [], + "source": [ + "cursor = conn.cursor()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "55392717-84cc-4bea-81ad-10b74f8a938a", + "metadata": {}, + "outputs": [], + "source": [ + "conn.close()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "0671600b-9f8c-402a-a932-43fa449cc3e1", + "metadata": {}, + "outputs": [ + { + "ename": "ProgrammingError", + "evalue": "Cannot operate on a closed database.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mProgrammingError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[12], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mcursor\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexecute\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mx\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mProgrammingError\u001b[0m: Cannot operate on a closed database." 
+ ] + } + ], + "source": [ + "cursor.execute(\"x\")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "1c3eb8fc-506e-4160-bdb0-97398675c9f4", + "metadata": {}, + "outputs": [ + { + "ename": "KeyError", + "evalue": "1", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[6], line 3\u001b[0m\n\u001b[1;32m 1\u001b[0m a \u001b[38;5;241m=\u001b[39m {}\n\u001b[0;32m----> 3\u001b[0m \u001b[38;5;28;01mdel\u001b[39;00m a[\u001b[38;5;241m1\u001b[39m]\n", + "\u001b[0;31mKeyError\u001b[0m: 1" + ] + } + ], + "source": [ + "a = {}\n", + "\n", + "del a[1]" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "8c0cfd89-eeb1-42c4-ab2e-26fb3eda95f8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
high-side/jobs/
\n", + "
\n", + "
\n", + "
\n", + "
\n", + " \n", + " JOB\n", + "
\n", + "\n", + " query_4\n", + "
\n", + " \n", + " \n", + "
\n", + " \n", + " #80bc379a33294eeebd1937f303143386\n", + " \n", + " \n", + " \n", + "\n", + "
\n", + " \n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " UserCode:\n", + " query_4\n", + "
\n", + "
\n", + " Status:\n", + " Completed\n", + "
\n", + "
\n", + " \n", + " Started At:\n", + " 2024-06-19 15:18:54.33823 by Jane Doe info@openmined.org\n", + "
\n", + "
\n", + " \n", + " Updated At:\n", + " 2024-06-19 1\n", + "
\n", + " \n", + "
\n", + " \n", + " Worker Pool:\n", + " default-pool-1 on worker \n", + " \n", + "
\n", + " \n", + " #default-pool\n", + " \n", + " \n", + " \n", + "\n", + "
\n", + " \n", + "
\n", + " \n", + "
\n", + " Subjobs:\n", + " 0\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " \n", + " \n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + " 10\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n",
+       "        Message\n",
+       "\n",
+       "    
\n", + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "
\n", + "\n" + ], + "text/markdown": [ + "```python\n", + "class Job:\n", + " id: UID = 80bc379a33294eeebd1937f303143386\n", + " status: completed\n", + " has_parent: False\n", + " result: 10\n", + " logs:\n", + "\n", + "0 \n", + "JOB COMPLETED\n", + " \n", + "```" + ], + "text/plain": [ + "syft.service.job.job_stash.Job" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client_high.jobs[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "985075a7-833b-4e4e-8441-1bc34c397148", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.13" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 2acf63b324eea028023f1c22a66b885999777ab5 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Wed, 19 Jun 2024 19:18:30 +0200 Subject: [PATCH 202/313] syft factories v2 --- packages/syft/src/syft/__init__.py | 4 +- .../src/syft/service/action/action_object.py | 2 +- .../src/syft/service/action/action_service.py | 28 +- .../syft/src/syft/service/code/user_code.py | 26 +- .../syft/src/syft/service/policy/policy.py | 293 ++++++++++++++++++ 5 files changed, 341 insertions(+), 12 deletions(-) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index d7183898935..d01d7bafd8f 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -57,9 +57,11 @@ from .service.dataset.dataset import CreateAsset as Asset # noqa: F401 from .service.dataset.dataset import CreateDataset as Dataset # noqa: F401 from .service.notification.notifications import NotificationStatus # noqa: F401 -from .service.policy.policy import CustomInputPolicy # noqa: F401 +from .service.policy.policy import CreatePolicyRuleConstant as Constant # noqa: F401 +from .service.policy.policy import CustomInputPolicy from .service.policy.policy import CustomOutputPolicy # noqa: F401 from .service.policy.policy import ExactMatch # noqa: F401 +from .service.policy.policy import MixedInputPolicy from .service.policy.policy import SingleExecutionExactOutput # noqa: F401 from .service.policy.policy import UserInputPolicy # noqa: F401 from .service.policy.policy import UserOutputPolicy # noqa: F401 diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 8bdf940a799..528e54b0dd8 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -1940,7 +1940,7 @@ def _repr_markdown_(self, wrap_as_python: bool = True, indent: int = 0) -> str: else self.syft_action_data_cache.__repr__() ) - return f"```python\n{res}\n{data_repr_}\n```\n" + return f"\n**{res}**\n\n{data_repr_}\n" def _data_repr(self) -> str | None: if isinstance(self.syft_action_data_cache, ActionDataEmpty): diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 00f1414b247..98dff4ee608 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -307,7 +307,7 @@ def _user_code_execute( if 
context.node: user_code_service = context.node.get_service("usercodeservice") - input_policy = code_item.get_input_policy(context) + input_policy = code_item.get_input_policy(context, force_deserialize=True) output_policy = code_item.get_output_policy(context) if not override_execution_permission: @@ -337,6 +337,10 @@ def _user_code_execute( if isinstance(result, SyftError): return Err(result.message) filtered_kwargs = result.ok() + + if hasattr(input_policy, "transform_kwargs"): + filtered_kwargs = input_policy.transform_kwargs(filtered_kwargs) + # update input policy to track any input state has_twin_inputs = False @@ -352,8 +356,9 @@ def _user_code_execute( try: if not has_twin_inputs: # no twins + # allow python types from inputpolicy filtered_kwargs = filter_twin_kwargs( - real_kwargs, twin_mode=TwinMode.NONE + real_kwargs, twin_mode=TwinMode.NONE, allow_python_types=True ) exec_result = execute_byte_code(code_item, filtered_kwargs, context) if output_policy: @@ -373,7 +378,7 @@ def _user_code_execute( else: # twins private_kwargs = filter_twin_kwargs( - real_kwargs, twin_mode=TwinMode.PRIVATE + real_kwargs, twin_mode=TwinMode.PRIVATE, allow_python_types=True ) private_exec_result = execute_byte_code( code_item, private_kwargs, context @@ -390,7 +395,9 @@ def _user_code_execute( result_id, private_exec_result.result ) - mock_kwargs = filter_twin_kwargs(real_kwargs, twin_mode=TwinMode.MOCK) + mock_kwargs = filter_twin_kwargs( + real_kwargs, twin_mode=TwinMode.MOCK, allow_python_types=True + ) # relative from .action_data_empty import ActionDataEmpty @@ -979,7 +986,9 @@ def filter_twin_args(args: list[Any], twin_mode: TwinMode) -> Any: return filtered -def filter_twin_kwargs(kwargs: dict, twin_mode: TwinMode) -> Any: +def filter_twin_kwargs( + kwargs: dict, twin_mode: TwinMode, allow_python_types=False +) -> Any: filtered = {} for k, v in kwargs.items(): if isinstance(v, TwinObject): @@ -992,7 +1001,14 @@ def filter_twin_kwargs(kwargs: dict, twin_mode: TwinMode) -> Any: f"Filter can only use {TwinMode.PRIVATE} or {TwinMode.MOCK}" ) else: - filtered[k] = v.syft_action_data + if isinstance(v, ActionObject): + filtered[k] = v.syft_action_data + elif isinstance(v, str | int | float | dict) and allow_python_types: + filtered[k] = v + else: + raise ValueError( + f"unexepected value {v} passed to filtered twin kwargs" + ) return filtered diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 244fb162d19..27a087c8bf6 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -13,6 +13,7 @@ from io import StringIO import itertools import random +import re import sys from textwrap import dedent from threading import Thread @@ -427,9 +428,11 @@ def input_policy(self) -> InputPolicy | None: return None return self._get_input_policy() - def get_input_policy(self, context: AuthedServiceContext) -> InputPolicy | None: + def get_input_policy( + self, context: AuthedServiceContext, force_deserialize=False + ) -> InputPolicy | None: status = self.get_status(context) - if not status.approved: + if not status.approved and not force_deserialize: return None return self._get_input_policy() @@ -966,11 +969,19 @@ def syft_function_single_use( ) +def replace_func_name(src, new_func_name): + pattern = r"\bdef\s+(\w+)\s*\(" + replacement = f"def {new_func_name}(" + new_src = re.sub(pattern, replacement, src, count=1) + return new_src + + def syft_function( input_policy: InputPolicy | UID | 
None = None, output_policy: OutputPolicy | UID | None = None, share_results_with_owners: bool = False, worker_pool_name: str | None = None, + name: str | None = None, ) -> Callable: if input_policy is None: input_policy = EmpyInputPolicy() @@ -992,9 +1003,16 @@ def syft_function( output_policy_type = type(output_policy) def decorator(f: Any) -> SubmitUserCode: + code = dedent(inspect.getsource(f)) + if name is not None: + fname = name + code = replace_func_name(code, fname) + else: + fname = f.__name__ + res = SubmitUserCode( - code=dedent(inspect.getsource(f)), - func_name=f.__name__, + code=code, + func_name=fname, signature=inspect.signature(f), input_policy_type=input_policy_type, input_policy_init_kwargs=init_input_kwargs, diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index 736bf85407c..66ecdba46d0 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -26,11 +26,13 @@ from ...abstract_node import NodeType from ...client.api import APIRegistry from ...client.api import NodeIdentity +from ...client.api import RemoteFunction from ...node.credentials import SyftVerifyKey from ...serde.recursive_primitives import recursive_serde_register_type from ...serde.serializable import serializable from ...store.document_store import PartitionKey from ...types.datetime import DateTime +from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.transforms import TransformContext @@ -40,6 +42,8 @@ from ...types.uid import UID from ...util.util import is_interpreter_jupyter from ..action.action_object import ActionObject +from ..action.action_permissions import ActionObjectPermission +from ..action.action_permissions import ActionPermission from ..code.code_parse import GlobalsVisitor from ..code.unparse import unparse from ..context import AuthedServiceContext @@ -171,6 +175,140 @@ def partition_by_node(kwargs: dict[str, Any]) -> dict[NodeIdentity, dict[str, UI return output_kwargs +@serializable() +class PolicyRule(SyftObject): + __canonical_name__ = "PolicyRule" + __version__ = SYFT_OBJECT_VERSION_1 + + kw: str + requires_input: bool = True + + def is_met( + self, context: AuthedServiceContext, action_object: ActionObject + ) -> bool: + return False + + +@serializable() +class CreatePolicyRule(SyftObject): + __canonical_name__ = "CreatePolicyRule" + __version__ = SYFT_OBJECT_VERSION_1 + + val: Any + + +@serializable() +class CreatePolicyRuleConstant(CreatePolicyRule): + __canonical_name__ = "CreatePolicyRuleConstant" + __version__ = SYFT_OBJECT_VERSION_1 + + val: Any + + def to_policy_rule(self, kw): + return Constant(kw=kw, val=self.val) + + +@serializable() +class Matches(PolicyRule): + __canonical_name__ = "Matches" + __version__ = SYFT_OBJECT_VERSION_1 + + val: UID + + def is_met( + self, context: AuthedServiceContext, action_object: ActionObject + ) -> bool: + return action_object.id == self.val + + +@serializable() +class Constant(PolicyRule): + __canonical_name__ = "PreFill" + __version__ = SYFT_OBJECT_VERSION_1 + + val: Any + requires_input: bool = False + + def is_met(self, context: AuthedServiceContext, *args, **kwargs) -> bool: + return True + + def transform_kwarg(self, val): + return self.val + + +@serializable() +class UserOwned(PolicyRule): + __canonical_name__ = "UserOwned" + __version__ = SYFT_OBJECT_VERSION_1 + + # str, float, int, bool, dict, 
list, set, tuple + + type: ( + type[str] + | type[float] + | type[int] + | type[bool] + | type[dict] + | type[list] + | type[set] + | type[tuple] + | None + ) + + def is_owned( + self, context: AuthedServiceContext, action_object: ActionObject + ) -> bool: + action_store = context.node.get_service("actionservice").store + return action_store.has_permission( + ActionObjectPermission( + action_object.id, ActionPermission.OWNER, context.credentials + ) + ) + + def is_met( + self, context: AuthedServiceContext, action_object: ActionObject + ) -> bool: + return type(action_object.syft_action_data) == self.type and self.is_owned( + context, action_object + ) + + +def user_code_arg2id(arg): + if isinstance(arg, ActionObject): + uid = arg.id + elif isinstance(arg, TwinObject): + uid = arg.id + elif isinstance(arg, Asset): + uid = arg.action_id + elif isinstance(arg, RemoteFunction): + # TODO: Beach Fix + # why do we need another call to the server to get the UID? + uid = arg.custom_function_actionobject_id() + else: + uid = arg + return uid + + +def retrieve_item_from_db(id: UID, context: AuthedServiceContext) -> ActionObject: + # relative + from ...service.action.action_object import TwinMode + + action_service = context.node.get_service("actionservice") + root_context = AuthedServiceContext( + node=context.node, credentials=context.node.verify_key + ) + value = action_service._get( + context=root_context, + uid=id, + twin_mode=TwinMode.NONE, + has_permission=True, + ) + if value.is_err(): + return value + else: + return value.ok() + + class InputPolicy(Policy): __canonical_name__ = "InputPolicy" __version__ = SYFT_OBJECT_VERSION_2 @@ -227,6 +365,161 @@ def _inputs_for_context(self, context: ChangeContext) -> dict | SyftError: return inputs +@serializable() +class MixedInputPolicy(InputPolicy): + # version + __canonical_name__ = "MixedInputPolicy" + __version__ = SYFT_OBJECT_VERSION_1 + + kwarg_rules: dict[NodeIdentity, dict[str, PolicyRule]] + + def __init__( + self, init_kwargs=None, client=None, *args: Any, **kwargs: Any + ) -> None: + if init_kwargs is not None: + kwarg_rules = init_kwargs + kwargs = {} + else: + node_identity = self.find_node_identity(kwargs, client) + kwarg_rules_current_node = {} + for kw, arg in kwargs.items(): + if isinstance( + arg, UID | Asset | ActionObject | TwinObject | RemoteFunction + ): + kwarg_rules_current_node[kw] = Matches( + kw=kw, val=user_code_arg2id(arg) + ) + elif arg in [str, float, int, bool, dict, list, set, tuple]: + kwarg_rules_current_node[kw] = UserOwned(kw=kw, type=arg) + elif isinstance(arg, CreatePolicyRule): + kwarg_rules_current_node[kw] = arg.to_policy_rule(kw) + else: + raise ValueError("Incorrect argument") + kwarg_rules = {node_identity: kwarg_rules_current_node} + + super().__init__( + *args, kwarg_rules=kwarg_rules, init_kwargs=kwarg_rules, **kwargs + ) + + def transform_kwargs(self, kwargs: dict[str, Any]) -> dict[str, Any]: + for _, rules in self.kwarg_rules.items(): + for kw, rule in rules.items(): + if hasattr(rule, "transform_kwarg"): + val = rule.transform_kwarg(kwargs.get(kw, None)) + kwargs[kw] = val + return kwargs + + def find_node_identity(self, kwargs: dict[str, Any], client=None) -> NodeIdentity: + if client is not None: + return NodeIdentity.from_api(client.api) + + apis = APIRegistry.get_all_api() + matches = set() + has_ids = False + for val in kwargs.values(): + # we mostly get the UID here because we don't want to store all those + # other objects, so we need to create a global UID obj lookup service + if isinstance( + 
val, UID | Asset | ActionObject | TwinObject | RemoteFunction + ): + has_ids = True + id = user_code_arg2id(val) + for api in apis: + # TODO: Beach Fix + # here be dragons, we need to refactor this since the existence + # depends on the type and service + # also the whole NodeIdentity needs to be removed + check_endpoints = [ + api.services.action.exists, + api.services.api.exists, + ] + for check_endpoint in check_endpoints: + result = check_endpoint(id) + if result: + break # stop looking + if result: + node_identity = NodeIdentity.from_api(api) + matches.add(node_identity) + + if len(matches) == 0: + if not has_ids: + if len(apis) == 1: + return NodeIdentity.from_api(apis[0]) + else: + raise ValueError( + "Multiple Node Identities, please only log in to one client (for this policy) and try again" + ) + else: + raise ValueError("No Node Identities") + if len(matches) > 1: + # TODO: Beach Fix + raise ValueError("Multiple Node Identities") + # we need to fix this as it's possible we could + # grab the wrong API and call a different user context in jupyter testing + pass # just grab the first one + return matches.pop() + + def filter_kwargs( + self, + kwargs: dict[str, UID], + context: AuthedServiceContext, + code_item_id: UID, + ) -> Result[dict[Any, Any], str]: + try: + res = {} + for _, rules in self.kwarg_rules.items(): + for kw, rule in rules.items(): + if rule.requires_input: + passed_id = kwargs[kw] + actionobject: ActionObject = retrieve_item_from_db( + passed_id, context + ) + rule_check_args = (actionobject,) + else: + rule_check_args = () + # TODO + actionobject = rule.val + if not rule.is_met(context, *rule_check_args): + raise ValueError(f"{rule} is not met") + else: + res[kw] = actionobject + except Exception as e: + return Err(str(e)) + return Ok(res) + + def _is_valid( + self, + context: AuthedServiceContext, + usr_input_kwargs: dict, + code_item_id: UID, + ) -> Result[bool, str]: + filtered_input_kwargs = self.filter_kwargs( + kwargs=usr_input_kwargs, + context=context, + code_item_id=code_item_id, + ) + + if filtered_input_kwargs.is_err(): + return filtered_input_kwargs + + filtered_input_kwargs = filtered_input_kwargs.ok() + + expected_input_kwargs = set() + for _inp_kwargs in self.inputs.values(): + for k in _inp_kwargs.keys(): + if k not in usr_input_kwargs: + return Err(f"Function missing required keyword argument: '{k}'") + expected_input_kwargs.update(_inp_kwargs.keys()) + + permitted_input_kwargs = list(filtered_input_kwargs.keys()) + not_approved_kwargs = set(expected_input_kwargs) - set(permitted_input_kwargs) + if len(not_approved_kwargs) > 0: + return Err( + f"Input arguments: {not_approved_kwargs} to the function are not approved yet."
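# How the rule kinds above compose in practice -- a rough usage sketch (the
# kwarg names and values are illustrative; `sy.MixedInputPolicy` and
# `sy.Constant` are the notebook-facing names used later in this series):
#
#     policy = sy.MixedInputPolicy(
#         client=client,                      # fixes the NodeIdentity for every rule
#         data=asset,                         # Matches: only this asset's UID is accepted
#         threshold=float,                    # UserOwned: any caller-owned float
#         query=sy.Constant(val="SELECT 1"),  # Constant: pre-filled, requires_input=False
#     )
#
# filter_kwargs() then resolves each kwarg through its rule, and _is_valid()
# rejects the call if any expected kwarg is missing or not yet approved.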
+ ) + return Ok(True) + + def retrieve_from_db( code_item_id: UID, allowed_inputs: dict[str, UID], context: AuthedServiceContext ) -> Result[dict[str, Any], str]: From 8338ba256cd3dff0115d5cde64bfaa99a9deab2b Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Wed, 19 Jun 2024 22:07:56 +0200 Subject: [PATCH 203/313] feat: sanitize html --- .../syft/util/notebook_ui/components/tabulator_template.py | 3 ++- packages/syft/src/syft/util/notebook_ui/styles.py | 2 +- packages/syft/src/syft/util/table.py | 5 +++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py index ee0576cc206..676dbe3151e 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py +++ b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py @@ -8,6 +8,7 @@ from IPython.display import display import jinja2 from loguru import logger +import nh3 # relative from ...assets import load_css @@ -130,7 +131,7 @@ def build_tabulator_table( uid=uid, columns=json.dumps(column_data), row_header=json.dumps(row_header), - data=json.dumps(table_data), + data=nh3.clean(json.dumps(table_data)), css=css, js=js, index_field_name=TABLE_INDEX_KEY, diff --git a/packages/syft/src/syft/util/notebook_ui/styles.py b/packages/syft/src/syft/util/notebook_ui/styles.py index a250c20a7dc..2e780394687 100644 --- a/packages/syft/src/syft/util/notebook_ui/styles.py +++ b/packages/syft/src/syft/util/notebook_ui/styles.py @@ -28,6 +28,6 @@ CSS_CODE = f""" """ diff --git a/packages/syft/src/syft/util/table.py b/packages/syft/src/syft/util/table.py index 998e022bdbd..cf05e5f7e45 100644 --- a/packages/syft/src/syft/util/table.py +++ b/packages/syft/src/syft/util/table.py @@ -8,6 +8,7 @@ # third party from loguru import logger +import nh3 # relative from .notebook_ui.components.table_template import TABLE_INDEX_KEY @@ -89,7 +90,7 @@ def _create_table_rows( if "id" in ret_val: del ret_val["id"] for key in ret_val.keys(): - cols[key].append(ret_val[key]) + cols[key].append(nh3.clean(ret_val[key])) else: for field in extra_fields: value = item @@ -134,7 +135,7 @@ def _create_table_rows( except Exception as e: print(e) value = None - cols[field].append(str(value)) + cols[field].append(nh3.clean(str(value))) col_lengths = {len(cols[col]) for col in cols.keys()} if len(col_lengths) != 1: From dc181cdde21284ad786c2bf48ed96bd25db49f20 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Wed, 19 Jun 2024 23:00:15 +0200 Subject: [PATCH 204/313] make demo --- .../src/syft/protocol/protocol_version.json | 49 +++++++++++++++++++ packages/syft/src/syft/service/api/api.py | 23 ++++++++- packages/syft/src/syft/service/queue/queue.py | 2 + .../src/syft/service/user/user_service.py | 17 +++++++ 4 files changed, 89 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index a383eed61dd..86b01e6244a 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -291,6 +291,55 @@ "hash": "9b8ab2d513d84006bdd1329cd0bb636e7e62100a6227d8b772a5bf7c0c45b72f", "action": "add" } + }, + "PolicyRule": { + "1": { + "version": 1, + "hash": "31a982b94654ce27ad27a6622c6fa26dfe3f759a7824ac21d104390f10a5aa82", + "action": "add" + } + }, + "CreatePolicyRule": { + "1": { + "version": 1, + "hash": 
"9b82e36c63e10c5b7b76b3b8ec1da1d2dfdce39f2cce98603a418ec221621874", + "action": "add" + } + }, + "CreatePolicyRuleConstant": { + "1": { + "version": 1, + "hash": "f3feeaeefa1a1e86528aba80426c9ae4b6c723c41095cc613499acf39075b1ed", + "action": "add" + } + }, + "Matches": { + "1": { + "version": 1, + "hash": "d1e875a6332a481458e83db364dfdf92bd34a87093d9762dfe8e136e5088bc4e", + "action": "add" + } + }, + "PreFill": { + "1": { + "version": 1, + "hash": "3f9018398a15b3207fd7b340e314adcce1bc64f40f1381f4b40412bdb061a301", + "action": "add" + } + }, + "UserOwned": { + "1": { + "version": 1, + "hash": "b5cbb44d742fa51b9adf2a48bb56d9ff5ca82a25f8568a2505961bd906d9d084", + "action": "add" + } + }, + "MixedInputPolicy": { + "1": { + "version": 1, + "hash": "0e84e4c91e378717e1a4703574b07e3b1e6a3e5707401b4e0cc8d30088a506b9", + "action": "add" + } } } } diff --git a/packages/syft/src/syft/service/api/api.py b/packages/syft/src/syft/service/api/api.py index f4b4796f509..0bd2c7fd579 100644 --- a/packages/syft/src/syft/service/api/api.py +++ b/packages/syft/src/syft/service/api/api.py @@ -17,9 +17,10 @@ from result import Err from result import Ok from result import Result +from syft.service.user.user_service import UserService # relative -from ...abstract_node import AbstractNode +from ...abstract_node import AbstractNode, NodeSideType from ...client.client import SyftClient from ...serde.serializable import serializable from ...serde.signature import signature_remove_context @@ -57,6 +58,7 @@ class TwinAPIAuthedContext(AuthedServiceContext): code: HelperFunctionSet | None = None state: dict[Any, Any] | None = None admin_client: SyftClient | None = None + user_client: SyftClient | None = None @serializable() @@ -206,6 +208,7 @@ def build_internal_context( self, context: AuthedServiceContext, admin_client: SyftClient | None = None, + user_client: SyftClient | None = None, ) -> TwinAPIAuthedContext: helper_function_dict: dict[str, Callable] = {} self.helper_functions = self.helper_functions or {} @@ -235,6 +238,7 @@ def build_internal_context( state=self.state or {}, user=user, admin_client=admin_client, + user_client=user_client ) def __call__(self, *args: Any, **kwargs: Any) -> Any: @@ -480,6 +484,20 @@ def exec_private_function( return SyftError(message="You're not allowed to run this code.") + def get_user_client_from_node(self, context: AuthedServiceContext) -> SyftClient: + # get a user client + guest_client = context.node.get_guest_client() + user_client = guest_client + signing_key_for_verify_key = context.node.get_service_method( + UserService.signing_key_for_verify_key + ) + private_key = signing_key_for_verify_key( + context=context, verify_key=context.credentials + ) + signing_key = private_key.signing_key + user_client.credentials = signing_key + return user_client + def get_admin_client_from_node(self, context: AuthedServiceContext) -> SyftClient: admin_client = context.node.get_guest_client() admin_client.credentials = context.node.signing_key @@ -499,13 +517,14 @@ def exec_code( src = ast.unparse(inner_function) raw_byte_code = compile(src, code.func_name, "exec") register_fn_in_linecache(code.func_name, src) + user_client = self.get_user_client_from_node(context) admin_client = self.get_admin_client_from_node(context) # load it exec(raw_byte_code) # nosec internal_context = code.build_internal_context( - context=context, admin_client=admin_client + context=context, admin_client=admin_client, user_client=user_client ) # execute it diff --git a/packages/syft/src/syft/service/queue/queue.py 
b/packages/syft/src/syft/service/queue/queue.py index 6bfb7bd6f89..16389dd3ede 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -168,6 +168,7 @@ def handle_message_multiprocessing( document_store_config=worker_settings.document_store_config, action_store_config=worker_settings.action_store_config, blob_storage_config=worker_settings.blob_store_config, + node_side_type=worker_settings.node_side_type, queue_config=queue_config, is_subprocess=True, migrate=False, @@ -275,6 +276,7 @@ def handle_message(message: bytes, syft_worker_id: UID) -> None: document_store_config=worker_settings.document_store_config, action_store_config=worker_settings.action_store_config, blob_storage_config=worker_settings.blob_store_config, + node_side_type=worker_settings.node_side_type, queue_config=queue_config, is_subprocess=True, migrate=False, diff --git a/packages/syft/src/syft/service/user/user_service.py b/packages/syft/src/syft/service/user/user_service.py index 7bd6b7e68af..fa586353fc4 100644 --- a/packages/syft/src/syft/service/user/user_service.py +++ b/packages/syft/src/syft/service/user/user_service.py @@ -133,6 +133,23 @@ def get_all( # 🟡 TODO: No user exists will happen when result.ok() is empty list return SyftError(message="No users exists") + def signing_key_for_verify_key( + self, context: AuthedServiceContext, verify_key: SyftVerifyKey + ) -> UserPrivateKey | SyftError: + result = self.stash.get_by_verify_key( + credentials=self.admin_verify_key(), verify_key=verify_key + ) + if result.is_ok(): + user = result.ok() + if user is not None: + return user.to(UserPrivateKey) + + return SyftError(message=f"No user exists with {verify_key}.") + + return SyftError( + message=f"Failed to retrieve user with {verify_key} with error: {result.err()}" + ) + def get_role_for_credentials( self, credentials: SyftVerifyKey | SyftSigningKey ) -> ServiceRole | None | SyftError: From 69eae7cb547940fa7dabd5df83f65617d0b93605 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Wed, 19 Jun 2024 23:01:20 +0200 Subject: [PATCH 205/313] nb --- notebooks/Bigquery full flow.ipynb | 719 +++++++++++++++++++++++++++++ 1 file changed, 719 insertions(+) create mode 100644 notebooks/Bigquery full flow.ipynb diff --git a/notebooks/Bigquery full flow.ipynb b/notebooks/Bigquery full flow.ipynb new file mode 100644 index 00000000000..dc63ecf8d52 --- /dev/null +++ b/notebooks/Bigquery full flow.ipynb @@ -0,0 +1,719 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "ce366797-29fa-4f38-af51-2b70cee8ce54", + "metadata": { + "jp-MarkdownHeadingCollapsed": true + }, + "source": [ + "# Syncing helpers" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "2e4548e0-20ce-472d-aa0f-e1c29952b694", + "metadata": {}, + "outputs": [], + "source": [ + "import time\n", + "\n", + "import syft as sy\n", + "from syft.client.domain_client import DomainClient\n", + "from syft.client.syncing import compare_clients\n", + "from syft.service.code.user_code import UserCode\n", + "from syft.service.job.job_stash import Job, JobStatus\n", + "from syft.service.request.request import Request\n", + "from syft.service.sync.diff_state import ObjectDiffBatch\n", + "\n", + "\n", + "def is_request_to_sync(batch: ObjectDiffBatch) -> bool:\n", + " # True if this is a new low-side request\n", + " # TODO add condition for sql requests/usercodes\n", + " low_request = batch.root.low_obj\n", + " return isinstance(low_request, Request) and batch.status == \"NEW\"\n", + "\n", + "\n", + 
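+    "# Sync policy implemented by these helpers: a diff batch is synced up only\n",
+    "# when its root is a NEW low-side Request, and a NEW high-side Job (checked\n",
+    "# just below) is shared back only once its status is COMPLETED or ERRORED,\n",
+    "# so jobs that are still running are never synced early.\n",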
"def is_job_to_sync(batch: ObjectDiffBatch):\n", + " # True if this is a new high-side job that is either COMPLETED or ERRORED\n", + " if batch.status != \"NEW\":\n", + " return False\n", + " if not isinstance(batch.root.high_obj, Job):\n", + " return False\n", + " job = batch.root.high_obj\n", + " return job.status in (JobStatus.ERRORED, JobStatus.COMPLETED)\n", + "\n", + "\n", + "def sync_new_requests(\n", + " client_low: DomainClient,\n", + " client_high: DomainClient,\n", + ") -> dict[sy.UID, sy.SyftSuccess | sy.SyftError] | sy.SyftError:\n", + " sync_request_results = {}\n", + " diff = compare_clients(\n", + " from_client=client_low, to_client=client_high, include_types=[\"request\"]\n", + " )\n", + " if isinstance(diff, sy.SyftError):\n", + " print(diff)\n", + " return sync_request_results\n", + " for batch in diff.batches:\n", + " if is_request_to_sync(batch):\n", + " request_id = batch.root.low_obj.id\n", + " w = batch.resolve()\n", + " result = w.click_sync()\n", + " sync_request_results[request_id] = result\n", + " return sync_request_results\n", + "\n", + "\n", + "def execute_requests(\n", + " client_high: DomainClient, request_ids: list[sy.UID]\n", + ") -> dict[sy.UID, Job]:\n", + " jobs_by_request_id = {}\n", + " for request_id in request_ids:\n", + " request = client_high.requests.get_by_uid(request_id)\n", + " if not isinstance(request, Request):\n", + " continue\n", + "\n", + " code = request.code\n", + " if not isinstance(code, UserCode):\n", + " continue\n", + "\n", + " func_name = request.code.service_func_name\n", + " api_func = getattr(client_high.code, func_name, None)\n", + " if api_func is None:\n", + " continue\n", + "\n", + " job = api_func(blocking=False)\n", + " jobs_by_request_id[request_id] = job\n", + " # sleep to prevent SQLite connection pool issues\n", + " time.sleep(1)\n", + "\n", + " return jobs_by_request_id\n", + "\n", + "\n", + "def sync_and_execute_new_requests(\n", + " client_low: DomainClient, client_high: DomainClient\n", + ") -> None:\n", + " sync_results = sync_new_requests(client_low, client_high)\n", + " if isinstance(sync_results, sy.SyftError):\n", + " print(sync_results)\n", + " return\n", + "\n", + " request_ids = [\n", + " uid for uid, res in sync_results.items() if isinstance(res, sy.SyftSuccess)\n", + " ]\n", + " print(f\"Synced {len(request_ids)} new requests\")\n", + "\n", + " jobs_by_request = execute_requests(client_high, request_ids)\n", + " print(f\"Started {len(jobs_by_request)} new jobs\")\n", + "\n", + "\n", + "def sync_finished_jobs(\n", + " client_low: DomainClient,\n", + " client_high: DomainClient,\n", + ") -> dict[sy.UID, sy.SyftError | sy.SyftSuccess] | sy.SyftError:\n", + " sync_job_results = {}\n", + " diff = compare_clients(\n", + " from_client=client_high, to_client=client_low, include_types=[\"job\"]\n", + " )\n", + " if isinstance(diff, sy.SyftError):\n", + " print(diff)\n", + " return diff\n", + "\n", + " for batch in diff.batches:\n", + " if is_job_to_sync(batch):\n", + " batch_id = batch.root.high_obj.id\n", + " w = batch.resolve()\n", + " share_result = w.click_share_all_private_data()\n", + " if isinstance(share_result, sy.SyftError):\n", + " sync_job_results[batch_id] = share_result\n", + " continue\n", + " sync_result = w.click_sync()\n", + " sync_job_results[batch_id] = sync_result\n", + "\n", + " print(f\"Sharing {len(sync_job_results)} new results\")\n", + " return sync_job_results\n", + "\n", + "\n", + "def auto_sync(client_low: DomainClient, client_high: DomainClient) -> None:\n", + " print(\"Starting 
auto sync\")\n", + " sync_and_execute_new_requests(client_low, client_high)\n", + " sync_finished_jobs(client_low, client_high)\n", + " print(\"Finished auto sync\")\n", + "\n", + "\n", + "def auto_sync_loop(\n", + " client_low: DomainClient, client_high: DomainClient, sleep_seconds: int = 60\n", + ") -> None:\n", + " while True:\n", + " auto_sync(client_low, client_high)\n", + " time.sleep(sleep_seconds)" + ] + }, + { + "cell_type": "markdown", + "id": "b150bad9-3ea2-42e1-b560-a4fe4a37cbed", + "metadata": {}, + "source": [ + "# Create Nodes" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "197db367-bc49-4a41-ba94-756e8b8b4bf4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Staging Protocol Changes...\n", + "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", + "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", + "Creating default worker image with tag='local-dev'\n", + "Setting up worker poolname=default-pool workers=1 image_uid=cddfb7f5ab354ddcb344f8716a0b29cb in_memory=True\n", + "Created default worker pool.\n", + "Data Migrated to latest version !!!\n", + "Staging Protocol Changes...\n", + "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", + "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", + "Creating default worker image with tag='local-dev'\n", + "Setting up worker poolname=default-pool workers=4 image_uid=0b456f4cf12a4d26a989163ccbd88624 in_memory=True\n", + "Created default worker pool.\n", + "Data Migrated to latest version !!!\n", + "Logged into as GUEST\n", + "Logged into as GUEST\n" + ] + } + ], + "source": [ + "low_side = sy.orchestra.launch(\n", + " name=\"auto-sync-low\",\n", + " node_side_type=\"low\",\n", + " local_db=True,\n", + " reset=True,\n", + " n_consumers=1,\n", + " create_producer=True,\n", + " dev_mode=True,\n", + ")\n", + "\n", + "high_side = sy.orchestra.launch(\n", + " name=\"high-side\",\n", + " node_side_type=\"high\",\n", + " local_db=True,\n", + " reset=True,\n", + " n_consumers=4,\n", + " create_producer=True,\n", + " dev_mode=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "01563270-ec4a-4eae-8f27-2a53d07610da", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" + ], + "text/plain": [ + "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as \n" + ] + } + ], + "source": [ + "client_high = high_side.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "client_low = low_side.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "client_low.register(email=\"newuser@openmined.org\", name=\"John Doe\", password=\"pw\", password_verify=\"pw\")\n", + "client_low_ds = low_side.login(email=\"newuser@openmined.org\", password=\"pw\")" + ] + }, + { + "cell_type": "markdown", + "id": "af1bf1a3-5c0a-4895-bc91-ffedbb6e4e08", + "metadata": {}, + "source": [ + "# Create Syftfunction factory " + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "2e3f599f-86ed-4cb8-ae7a-170f5fd59ef8", + "metadata": {}, + "outputs": [], + "source": [ + "@sy.api_endpoint(path=\"reddit.submit_query\")\n", + "def submit_query(\n", + " context, func_name: str, query: str,\n", + ") -> str:\n", + " import syft as sy\n", + "\n", + " if not func_name.isalpha():\n", + " return sy.SyftError(message=\"Please only use alphabetic characters for your func_name\")\n", + "\n", + " @sy.syft_function(name=func_name, input_policy=sy.MixedInputPolicy(query=sy.Constant(val=query), client=context.admin_client))\n", + " def execute_query(query: str):\n", + " return f\"your query {query} was EXECUTED\"\n", + "\n", + " res = context.user_client.code.request_code_execution(execute_query)\n", + "\n", + " return f\"Query submitted {res}, use `client.code.{func_name}()` to run your query\"" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "02c6287f-322e-469f-a2ae-666fc17c6dac", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
Endpoint successfully created.

" + ], + "text/plain": [ + "SyftSuccess: Endpoint successfully created." + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client_low.api.services.api.add(endpoint=submit_query)" + ] + }, + { + "cell_type": "markdown", + "id": "46666113-d77a-4eb3-9154-63b4dc207d83", + "metadata": {}, + "source": [ + "# Submit request" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "027d4479-0f47-4647-9c00-7b7d87f6a80f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess:
Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" + ], + "text/plain": [ + "SyftSuccess: Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "submit_res = client_low_ds.api.services.reddit.submit_query(func_name=\"myquery\",\n", + " query=\"FROM ABC SELECT *\")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "0139b14a-b14e-4ee9-9219-7b5d18715938", + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "\n", + "**Pointer**\n", + "\n", + "'Query submitted syft.service.request.request.Request, use `client.code.myquery()` to run your query'\n" + ], + "text/plain": [ + "Pointer:\n", + "'Query submitted syft.service.request.request.Request, use `client.code.myquery()` to run your query'" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "submit_res" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "7041ad9f-542e-4fd7-b780-d64bbe8e93f1", + "metadata": {}, + "outputs": [], + "source": [ + "# client_low_ds.code.myquery()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "e518ba33-9d2a-40a2-a6b8-174bc8392c77", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Decision: Syncing 2 objects\n", + "Synced 1 new requests\n", + "Started 1 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sharing 0 new results\n", + "Finished auto sync\n", + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synced 0 new requests\n", + "Started 0 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sharing 0 new results\n", + "Finished auto sync\n", + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synced 0 new requests\n", + "Started 0 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Decision: Syncing 5 objects\n", + "Sharing 1 new results\n", + "Finished auto sync\n", + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synced 0 new requests\n", + "Started 0 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sharing 0 new results\n", + "Finished auto sync\n" + ] + } + ], + "source": [ + "# do forever\n", + "for i in range(4):\n", + " auto_sync(client_low, client_high)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "d0a68fb0-bc9c-4b23-a931-b0bb7009e72e", + "metadata": {}, + "outputs": [], + "source": [ + "# client_high.jobs[0].logs()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "f92240e6-0c35-4cd3-b52d-a5fa3256e3f9", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftWarning:
Loading results from cache.

" + ], + "text/plain": [ + "SyftWarning: Loading results from cache." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "res = client_low_ds.code.myquery()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "23050872-2f0b-4d23-9c1f-57ad74ac09d5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "\n", + "**Pointer**\n", + "\n", + "'your query FROM ABC SELECT * was EXECUTED'\n" + ], + "text/plain": [ + "Pointer:\n", + "'your query FROM ABC SELECT * was EXECUTED'" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "res" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d41b8deb-0e15-4ab5-823c-751f99e491f8", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "c6a426e8-314a-4a44-8bbe-47f8f8663835", + "metadata": {}, + "outputs": [], + "source": [ + "# for _ in range(10):\n", + "# j = client_high.code.query_0(blocking=False)\n", + "# print(j)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "985075a7-833b-4e4e-8441-1bc34c397148", + "metadata": {}, + "outputs": [], + "source": [ + "# def make_request(client):\n", + "# existing_requests = client.requests.get_all()\n", + "\n", + "# @sy.syft_function_single_use()\n", + "# def func():\n", + "# return 10\n", + "\n", + "# func.func_name = f\"query_{len(existing_requests)}\"\n", + "# func.code = func.code.replace(\"def func(\", f\"def {func.func_name}(\")\n", + "\n", + "# res = client.code.request_code_execution(func)\n", + "# return res\n", + "\n", + "\n", + "# for _ in range(5):\n", + "# make_request(client_low_ds)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8674ffae-4635-43bd-9117-0959cd0040ba", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From e72cc5b0b478aca528b041981d5167d09dabaa2f Mon Sep 17 00:00:00 2001 From: Kien Dang Date: Thu, 20 Jun 2024 13:54:14 +0800 Subject: [PATCH 206/313] Fix Job._repr_html_ error after syncing --- packages/syft/src/syft/service/job/job_stash.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 8faa3a3d90a..1705fc28376 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -592,8 +592,9 @@ def _repr_html_(self) -> str: updated_at = str(self.updated_at)[:-7] if self.updated_at else "--" user_repr = "--" - if self.requested_by: - requesting_user = self.requesting_user + if self.requested_by and not isinstance( + requesting_user := self.requesting_user, SyftError + ): user_repr = f"{requesting_user.name} {requesting_user.email}" worker_attr = "" From f337fb9cf52e894d1910ef33b7252eab2080218c Mon Sep 17 00:00:00 2001 From: Kien Dang Date: Thu, 20 Jun 2024 13:59:00 +0800 Subject: [PATCH 207/313] Remove unused vars in Job._repr_html_ template --- packages/syft/src/syft/service/job/job_stash.py | 5 ----- 1 file changed, 5 deletions(-) diff --git 
a/packages/syft/src/syft/service/job/job_stash.py b/packages/syft/src/syft/service/job/job_stash.py index 1705fc28376..bb85134c387 100644 --- a/packages/syft/src/syft/service/job/job_stash.py +++ b/packages/syft/src/syft/service/job/job_stash.py @@ -621,17 +621,12 @@ def _repr_html_(self) -> str: template = Template(job_repr_template) return template.substitute( - uid=str(UID()), - grid_template_columns=None, - grid_template_cell_columns=None, - cols=0, job_type=job_type, api_header=api_header, user_code_name=self.user_code_name, button_html=button_html, status=self.status.value.title(), creation_time=creation_time, - user_rerp=user_repr, updated_at=updated_at, worker_attr=worker_attr, no_subjobs=len(self.subjobs), From ce9d9e7a2c25b095966b20956644cabefa0be2d7 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Thu, 20 Jun 2024 11:04:28 +0300 Subject: [PATCH 208/313] add _post_user_code_transform_ops --- .../syft/service/code/user_code_service.py | 57 +++++++++---------- 1 file changed, 27 insertions(+), 30 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 9287e49fec4..cd07e39d26b 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -75,6 +75,19 @@ def _submit( ) -> Result[UserCode, str]: if not isinstance(code, UserCode): code = code.to(UserCode, context=context) # type: ignore[unreachable] + result = self._post_user_code_transform_ops(context, code) + if isinstance(result, SyftError): + # if the validation fails, we should remove the user code status + # and code version to prevent dangling status + root_context = AuthedServiceContext( + credentials=context.node.verify_key, node=context.node + ) + + if code.status_link is not None: + _ = context.node.get_service("usercodestatusservice").remove( + root_context, code.status_link.object_uid + ) + return result result = self.stash.set(context.credentials, code) return result @@ -133,30 +146,7 @@ def get_by_service_name( return SyftError(message=str(result.err())) return result.ok() - def _request_code_execution( - self, - context: AuthedServiceContext, - code: SubmitUserCode, - reason: str | None = "", - ) -> Request | SyftError: - user_code: UserCode = code.to(UserCode, context=context) - result = self._validate_request_code_execution(context, user_code) - if isinstance(result, SyftError): - # if the validation fails, we should remove the user code status - # and code version to prevent dangling status - root_context = AuthedServiceContext( - credentials=context.node.verify_key, node=context.node - ) - - if user_code.status_link is not None: - _ = context.node.get_service("usercodestatusservice").remove( - root_context, user_code.status_link.object_uid - ) - return result - result = self._request_code_execution_inner(context, user_code, reason) - return result - - def _validate_request_code_execution( + def _post_user_code_transform_ops( self, context: AuthedServiceContext, user_code: UserCode, @@ -197,10 +187,6 @@ def _validate_request_code_execution( if isinstance(pool_result, SyftError): return pool_result - result = self.stash.set(context.credentials, user_code) - if result.is_err(): - return SyftError(message=str(result.err())) - # Create a code history code_history_service = context.node.get_service("codehistoryservice") result = code_history_service.submit_version(context=context, code=user_code) @@ -209,7 +195,7 @@ def 
_validate_request_code_execution( return SyftSuccess(message="") - def _request_code_execution_inner( + def _request_code_execution( self, context: AuthedServiceContext, user_code: UserCode, @@ -260,7 +246,18 @@ def request_code_execution( reason: str | None = "", ) -> SyftSuccess | SyftError: """Request Code execution on user code""" - return self._request_code_execution(context=context, code=code, reason=reason) + + # TODO: check for duplicate submissions + user_code_or_err = self._submit(context, code) + if user_code_or_err.is_err(): + return SyftError(user_code_or_err.err()) + + result = self._request_code_execution( + context, + user_code_or_err.ok(), + reason, + ) + return result @service_method(path="code.get_all", name="get_all", roles=GUEST_ROLE_LEVEL) def get_all(self, context: AuthedServiceContext) -> list[UserCode] | SyftError: From 843e283f272d1a401b636fd44635ce962a7299f4 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Thu, 20 Jun 2024 10:05:24 +0200 Subject: [PATCH 209/313] update test --- .../syft/tests/syft/request/request_code_accept_deny_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/tests/syft/request/request_code_accept_deny_test.py b/packages/syft/tests/syft/request/request_code_accept_deny_test.py index cef84e4d2d1..b21a06579d1 100644 --- a/packages/syft/tests/syft/request/request_code_accept_deny_test.py +++ b/packages/syft/tests/syft/request/request_code_accept_deny_test.py @@ -202,4 +202,4 @@ def simple_function(data): result = ds_client.code.simple_function(data=action_obj) assert isinstance(result, SyftError) - assert "Execution denied" in result.message + assert "DENIED" in result.message From 40156267dcd6d359a5efb24e4abbe47dba7f4444 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Thu, 20 Jun 2024 11:29:19 +0300 Subject: [PATCH 210/313] use code directly in submit_version --- .../src/syft/service/code_history/code_history_service.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/syft/src/syft/service/code_history/code_history_service.py b/packages/syft/src/syft/service/code_history/code_history_service.py index adfd6dbee5d..54da3d491b3 100644 --- a/packages/syft/src/syft/service/code_history/code_history_service.py +++ b/packages/syft/src/syft/service/code_history/code_history_service.py @@ -49,11 +49,6 @@ def submit_version( if result.is_err(): return SyftError(message=str(result.err())) code = result.ok() - elif isinstance(code, UserCode): # type: ignore[unreachable] - result = user_code_service.get_by_uid(context=context, uid=code.id) - if isinstance(result, SyftError): - return result - code = result result = self.stash.get_by_service_func_name_and_verify_key( credentials=context.credentials, From d646af888b1ab60909ce0783d3c86161b9887e1b Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Thu, 20 Jun 2024 11:45:52 +0300 Subject: [PATCH 211/313] fix error message --- packages/syft/src/syft/service/code/user_code_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index cd07e39d26b..8afdcdc9bdc 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -250,7 +250,7 @@ def request_code_execution( # TODO: check for duplicate submissions user_code_or_err = self._submit(context, code) if user_code_or_err.is_err(): - return SyftError(user_code_or_err.err()) + return 
SyftError(message=user_code_or_err.err()) result = self._request_code_execution( context, From d7c9433829896454655f3a773f6b7fc02642c82f Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 20 Jun 2024 12:54:16 +0200 Subject: [PATCH 212/313] add safe_serde check to policies --- .../api/0.8/02-review-code-and-approve.ipynb | 15 ++++++++ notebooks/api/0.8/05-custom-policy.ipynb | 37 +++++++++++++++---- .../syft/src/syft/service/code/user_code.py | 35 +++++++++--------- .../syft/src/syft/service/policy/policy.py | 3 ++ 4 files changed, 65 insertions(+), 25 deletions(-) diff --git a/notebooks/api/0.8/02-review-code-and-approve.ipynb b/notebooks/api/0.8/02-review-code-and-approve.ipynb index ce15d881245..9612144b952 100644 --- a/notebooks/api/0.8/02-review-code-and-approve.ipynb +++ b/notebooks/api/0.8/02-review-code-and-approve.ipynb @@ -290,6 +290,21 @@ "print(op.policy_code)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Policies provided by Syft are available before approving the code,\n", + "# Custom policies are only safe to use once the code is approved.\n", + "\n", + "assert func.output_policy is not None\n", + "assert func.input_policy is not None\n", + "\n", + "func.output_policy" + ] + }, { "cell_type": "markdown", "metadata": {}, diff --git a/notebooks/api/0.8/05-custom-policy.ipynb b/notebooks/api/0.8/05-custom-policy.ipynb index 5adaacd9a77..ee9c50d89cc 100644 --- a/notebooks/api/0.8/05-custom-policy.ipynb +++ b/notebooks/api/0.8/05-custom-policy.ipynb @@ -508,6 +508,18 @@ "cell_type": "code", "execution_count": null, "id": "23", + "metadata": {}, + "outputs": [], + "source": [ + "# Custom policies need to be approved before they can be viewed and used\n", + "assert func.input_policy is None\n", + "assert func.output_policy is None" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24", "metadata": { "tags": [] }, @@ -520,7 +532,7 @@ { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -530,7 +542,18 @@ { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "26", + "metadata": {}, + "outputs": [], + "source": [ + "assert func.input_policy is not None\n", + "assert func.output_policy is not None" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27", "metadata": { "tags": [] }, @@ -543,7 +566,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -554,7 +577,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "29", "metadata": { "tags": [] }, @@ -566,7 +589,7 @@ { "cell_type": "code", "execution_count": null, - "id": "28", + "id": "30", "metadata": { "tags": [] }, @@ -578,7 +601,7 @@ { "cell_type": "code", "execution_count": null, - "id": "29", + "id": "31", "metadata": { "tags": [] }, @@ -594,7 +617,7 @@ { "cell_type": "code", "execution_count": null, - "id": "30", + "id": "32", "metadata": { "tags": [] }, diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index b8b1b74b848..f218dd8930d 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -554,15 +554,15 @@ def code_status(self) -> list: @property def input_policy(self) -> InputPolicy | None: - if not self.status.approved: - return None - return self._get_input_policy() + if self.status.approved or 
self.input_policy_type.has_safe_serde: + return self._get_input_policy() + return None def get_input_policy(self, context: AuthedServiceContext) -> InputPolicy | None: status = self.get_status(context) - if not status.approved: - return None - return self._get_input_policy() + if status.approved or self.input_policy_type.has_safe_serde: + return self._get_input_policy() + return None def _get_input_policy(self) -> InputPolicy | None: if len(self.input_policy_state) == 0: @@ -618,13 +618,18 @@ def input_policy(self, value: Any) -> None: # type: ignore raise Exception(f"You can't set {type(value)} as input_policy_state") def get_output_policy(self, context: AuthedServiceContext) -> OutputPolicy | None: - if not self.get_status(context).approved: - return None - return self._get_output_policy() + status = self.get_status(context) + if status.approved or self.output_policy_type.has_safe_serde: + return self._get_output_policy() + return None + + @property + def output_policy(self) -> OutputPolicy | None: # type: ignore + if self.status.approved or self.output_policy_type.has_safe_serde: + return self._get_output_policy() + return None def _get_output_policy(self) -> OutputPolicy | None: - # if not self.status.approved: - # return None if len(self.output_policy_state) == 0: output_policy = None if isinstance(self.output_policy_type, type) and issubclass( @@ -672,12 +677,6 @@ def input_policy_id(self) -> UID | None: return self.input_policy_init_kwargs.get("id", None) return None - @property - def output_policy(self) -> OutputPolicy | None: # type: ignore - if not self.status.approved: - return None - return self._get_output_policy() - @output_policy.setter # type: ignore def output_policy(self, value: Any) -> None: # type: ignore if isinstance(value, OutputPolicy): @@ -1153,7 +1152,7 @@ def syft_function( init_input_kwargs = None if isinstance(input_policy, CustomInputPolicy): input_policy_type = SubmitUserPolicy.from_obj(input_policy) - init_input_kwargs = partition_by_node(input_policy.init_kwargs) + init_input_kwargs = partition_by_node(input_policy.init_kwargs) # type: ignore else: input_policy_type = type(input_policy) init_input_kwargs = getattr(input_policy, "init_kwargs", {}) diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index 920721323bc..d19c59ab43a 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -14,6 +14,7 @@ import sys import types from typing import Any +from typing import ClassVar # third party from RestrictedPython import compile_restricted @@ -90,6 +91,7 @@ class Policy(SyftObject): # version __canonical_name__: str = "Policy" __version__ = SYFT_OBJECT_VERSION_2 + has_safe_serde: ClassVar[bool] = True id: UID init_kwargs: dict[Any, Any] = {} @@ -526,6 +528,7 @@ class CustomInputPolicy(metaclass=CustomPolicy): class UserPolicy(Policy): __canonical_name__: str = "UserPolicy" __version__ = SYFT_OBJECT_VERSION_2 + has_safe_serde: ClassVar[bool] = False id: UID node_uid: UID | None = None From f74968333c83c51ef7f451a6b7502dedeb90ef03 Mon Sep 17 00:00:00 2001 From: IonesioJunior Date: Thu, 20 Jun 2024 09:09:57 -0300 Subject: [PATCH 213/313] Enable Changing Mock Function Access View --- packages/syft/src/syft/service/api/api.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/api/api.py b/packages/syft/src/syft/service/api/api.py index a5d9db8f98d..0e66005ae9e 100644 --- 
a/packages/syft/src/syft/service/api/api.py +++ b/packages/syft/src/syft/service/api/api.py @@ -610,6 +610,7 @@ def endpoint_to_private_endpoint() -> list[Callable]: "api_code", "func_name", "settings", + "view_access", "helper_functions", "state", "signature", @@ -703,6 +704,8 @@ def create_new_api_endpoint( description: MarkdownDescription | None = None, worker_pool: str | None = None, endpoint_timeout: int = 60, + mock_access_view: bool = True, + private_access_view: bool = False, ) -> CreateTwinAPIEndpoint | SyftError: try: # Parse the string to extract the function name @@ -712,7 +715,8 @@ def create_new_api_endpoint( if private_function.signature != mock_function.signature: return SyftError(message="Signatures don't match") endpoint_signature = mock_function.signature - private_function.view_access = False + private_function.view_access = private_access_view + mock_function.view_access = mock_access_view return CreateTwinAPIEndpoint( path=path, From 29508c9e70441937441a84c5e5b686baa4832df2 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 20 Jun 2024 14:11:12 +0200 Subject: [PATCH 214/313] pass args to remote function --- packages/syft/src/syft/service/code/user_code.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index ddbd47487af..6b7dbfa35dc 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -912,7 +912,7 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any: warning=None, communication_protocol=api.communication_protocol, ) - return remote_user_function() + return remote_user_function(*args, **kwargs) class UserCodeUpdate(PartialSyftObject): From 28495f07138773cbf6cf8e42b2b1a925822ffeec Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 20 Jun 2024 16:04:02 +0200 Subject: [PATCH 215/313] add existing code --- .../syft/src/syft/service/code/user_code.py | 5 +- .../syft/service/code/user_code_service.py | 125 +++++++++++------- packages/syft/src/syft/types/syft_object.py | 4 +- 3 files changed, 84 insertions(+), 50 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 6b7dbfa35dc..eac79150ffc 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -958,6 +958,9 @@ def add_output_policy_ids(cls, values: Any) -> Any: values["id"] = UID() return values + def get_code_hash(self) -> str: + return hashlib.sha256(self.code.encode()).hexdigest() + @property def kwargs(self) -> dict[Any, Any] | None: return self.input_policy_init_kwargs @@ -1371,7 +1374,7 @@ def hash_code(context: TransformContext) -> TransformContext: code = context.output["code"] context.output["raw_code"] = code - code_hash = hashlib.sha256(code.encode("utf8")).hexdigest() + code_hash = context.obj.get_code_hash() context.output["code_hash"] = code_hash return context diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 37e8e088f31..aeb4919560e 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -62,32 +62,48 @@ def __init__(self, store: DocumentStore) -> None: @service_method(path="code.submit", name="submit", roles=GUEST_ROLE_LEVEL) def submit( - self, context: AuthedServiceContext, code: UserCode | SubmitUserCode + self, 
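# Note on the dedup key introduced above: get_code_hash() is a SHA-256 over the
# raw source string, so two submissions with byte-identical code collide even
# when their attached policies differ. Equivalent standalone computation
# (the name raw_code is illustrative):
#
#     import hashlib
#     digest = hashlib.sha256(raw_code.encode()).hexdigest()
#
# _submit(exists_ok=True) below uses this to return the existing UserCode
# instead of erroring, which is what lets request_code_execution accept
# re-submitted code.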
context: AuthedServiceContext, code: UserCode | SubmitUserCode + self, context: AuthedServiceContext, code: SubmitUserCode ) -> UserCode | SyftError: """Add User Code""" - result = self._submit(context=context, code=code) + result = self._submit(context, code, exists_ok=False) if result.is_err(): return SyftError(message=str(result.err())) return SyftSuccess(message="User Code Submitted", require_api_update=True) def _submit( - self, context: AuthedServiceContext, code: UserCode | SubmitUserCode + self, + context: AuthedServiceContext, + submit_code: SubmitUserCode, + exists_ok: bool = False, ) -> Result[UserCode, str]: - if not isinstance(code, UserCode): - code = code.to(UserCode, context=context) # type: ignore[unreachable] - result = self._post_user_code_transform_ops(context, code) - if isinstance(result, SyftError): - # if the validation fails, we should remove the user code status - # and code version to prevent dangling status - root_context = AuthedServiceContext( - credentials=context.node.verify_key, node=context.node - ) + existing_code_or_err = self.stash.get_by_code_hash( + context.credentials, + code_hash=submit_code.get_code_hash(), + ) - if code.status_link is not None: - _ = context.node.get_service("usercodestatusservice").remove( - root_context, code.status_link.object_uid - ) - return result + if existing_code_or_err.is_err(): + return existing_code_or_err + existing_code = existing_code_or_err.ok() + if existing_code is not None: + if not exists_ok: + return Err("The code to be submitted already exists") + return Ok(existing_code) + + code = submit_code.to(UserCode, context=context) + + result = self._post_user_code_transform_ops(context, code) + if result.is_err(): + # if the validation fails, we should remove the user code status + # and code version to prevent dangling status + root_context = AuthedServiceContext( + credentials=context.node.verify_key, node=context.node + ) + + if code.status_link is not None: + _ = context.node.get_service("usercodestatusservice").remove( + root_context, code.status_link.object_uid + ) + return result result = self.stash.set(context.credentials, code) return result @@ -150,33 +166,15 @@ def _post_user_code_transform_ops( self, context: AuthedServiceContext, user_code: UserCode, - ) -> SyftSuccess | SyftError: + ) -> Result[UserCode, str]: if user_code.output_readers is None: - return SyftError( - message=f"there are no verified output readers for {user_code}" - ) + return Err(f"there are no verified output readers for {user_code}") if user_code.input_owner_verify_keys is None: - return SyftError( - message=f"there are no verified input owners for {user_code}" - ) + return Err(f"there are no verified input owners for {user_code}") if not all( x in user_code.input_owner_verify_keys for x in user_code.output_readers ): - raise ValueError("outputs can only be distributed to input owners") + return Err("outputs can only be distributed to input owners") worker_pool_service = context.node.get_service("SyftWorkerPoolService") pool_result = worker_pool_service._get_worker_pool( context, pool_name=user_code.worker_pool_name, ) if isinstance(pool_result, SyftError): - return
pool_result + return Err(pool_result.message) # Create a code history code_history_service = context.node.get_service("codehistoryservice") result = code_history_service.submit_version(context=context, code=user_code) if isinstance(result, SyftError): - return result + return Err(result.message) - return SyftSuccess(message="") + return Ok(user_code) def _request_code_execution( self, @@ -234,6 +232,37 @@ def _request_code_execution( # The Request service already returns either a SyftSuccess or SyftError return result + def _get_or_submit_user_code( + self, + context: AuthedServiceContext, + code: SubmitUserCode | UserCode, + ) -> Result[UserCode, str]: + """ + - If the code is a UserCode, check if it exists and return + - If the code is a SubmitUserCode and the same code hash exists, return the existing code + - If the code is a SubmitUserCode and the code hash does not exist, submit the code + """ + if isinstance(code, UserCode): + # Get existing UserCode + user_code_result = self.stash.get_by_uid(context.credentials, code.id) + if user_code_result.is_err(): + return Err(user_code_result.err()) + user_code = user_code_result.ok() + if user_code is None: + return Err("UserCode not found on this node.") + return Ok(user_code) + + elif isinstance(code, SubmitUserCode): + # Submit new UserCode + # NOTE if a code with the same hash exists, it will be returned instead + user_code_result = self._submit(context, code, exists_ok=True) + return user_code_result + + else: + return Err( # type: ignore[unreachable] + f"request_code_execution expects a UserCode or SubmitUserCode object, got a {type(code).__name__}" + ) + @service_method( path="code.request_code_execution", name="request_code_execution", @@ -242,19 +271,19 @@ def _request_code_execution( def request_code_execution( self, context: AuthedServiceContext, - code: SubmitUserCode, + code: SubmitUserCode | UserCode, reason: str | None = "", ) -> SyftSuccess | SyftError: """Request Code execution on user code""" - # TODO: check for duplicate submissions - user_code_or_err = self._submit(context, code) - if user_code_or_err.is_err(): - return SyftError(message=user_code_or_err.err()) + user_code_result = self._get_or_submit_user_code(context, code) + if user_code_result.is_err(): + return SyftError(message=user_code_result.err()) + user_code = user_code_result.ok() result = self._request_code_execution( context, - user_code_or_err.ok(), + user_code, reason, ) return result diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index e4daf3a779f..04b282cdb64 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -18,6 +18,7 @@ from typing import ClassVar from typing import Optional from typing import TYPE_CHECKING +from typing import TypeVar from typing import Union from typing import get_args from typing import get_origin @@ -56,6 +57,7 @@ IntStr = int | str AbstractSetIntStr = Set[IntStr] MappingIntStrAny = Mapping[IntStr, Any] +T = TypeVar("T") SYFT_OBJECT_VERSION_1 = 1 @@ -547,7 +549,7 @@ def _upgrade_version(self, latest: bool = True) -> "SyftObject": return upgraded # transform from one supported type to another - def to(self, projection: type, context: Context | None = None) -> Any: + def to(self, projection: type[T], context: Context | None = None) -> T: # 🟡 TODO 19: Could we do an mro style inheritence conversion? Risky? 
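        # with the TypeVar signature introduced above, the projection type flows
        # into the return type: a call like `submit_code.to(UserCode, context=context)`
        # (as used in user_code_service) is now inferred as UserCode rather than Any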
transform = SyftObjectRegistry.get_transform(type(self), projection) return transform(self, context) From aed270e2cfd6f23cecd4bbce924fcdeeb7e5ed1f Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Thu, 20 Jun 2024 16:23:02 +0200 Subject: [PATCH 216/313] add existing request check --- .../syft/src/syft/service/code/user_code_service.py | 13 +++++++++++++ .../src/syft/service/request/request_service.py | 9 +++++++++ .../syft/src/syft/service/request/request_stash.py | 13 +++++++++++++ 3 files changed, 35 insertions(+) diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index aeb4919560e..d0cd722644f 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -199,6 +199,19 @@ def _request_code_execution( user_code: UserCode, reason: str | None = "", ) -> Request | SyftError: + # Cannot make multiple requests for the same code + get_by_usercode_id = context.node.get_service_method( + RequestService.get_by_usercode_id + ) + existing_requests = get_by_usercode_id(context, user_code.id) + if isinstance(existing_requests, SyftError): + return existing_requests + if len(existing_requests) > 0: + return SyftError( + message=f"Request {existing_requests[0].id} already exists for this UserCode. " + f"Please use the existing request, or submit a new UserCode to create a new request." + ) + # Users that have access to the output also have access to the code item if user_code.output_readers is not None: self.stash.add_permissions( diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index b48f75f5149..a4912924a21 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -301,6 +301,15 @@ def delete_by_uid( return SyftError(message=str(result.err())) return SyftSuccess(message=f"Request with id {uid} deleted.") + @service_method(path="request.get_by_usercode_id", name="get_by_usercode_id") + def get_by_usercode_id( + self, context: AuthedServiceContext, usercode_id: UID + ) -> list[Request] | SyftError: + result = self.stash.get_by_usercode_id(context.credentials, usercode_id) + if result.is_err(): + return SyftError(message=str(result.err())) + return result.ok() + TYPE_TO_SERVICE[Request] = RequestService SERVICE_TO_TYPES[RequestService].update({Request}) diff --git a/packages/syft/src/syft/service/request/request_stash.py b/packages/syft/src/syft/service/request/request_stash.py index 5b8fe3e08c5..dedee590357 100644 --- a/packages/syft/src/syft/service/request/request_stash.py +++ b/packages/syft/src/syft/service/request/request_stash.py @@ -1,6 +1,7 @@ # stdlib # third party +from result import Ok from result import Result # relative @@ -11,6 +12,7 @@ from ...store.document_store import PartitionSettings from ...store.document_store import QueryKeys from ...types.datetime import DateTime +from ...types.uid import UID from ...util.telemetry import instrument from .request import Request @@ -42,3 +44,14 @@ def get_all_for_verify_key( qks=qks, order_by=OrderByRequestTimeStampPartitionKey, ) + + def get_by_usercode_id( + self, credentials: SyftVerifyKey, user_code_id: UID + ) -> Result[list[Request], str]: + query = self.get_all(credentials=credentials) + if query.is_err(): + return query + + all_requests: list[Request] = query.ok() + results = [r for r in all_requests if r.code_id == user_code_id] + 
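+        # note: this filters client-side. It fetches every request readable by
+        # `credentials` and keeps those whose code_id matches, so the cost grows
+        # with the total number of requests rather than with the match count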
return Ok(results) From a33f06c115ed4f1da4c1957f7c1ab2ada647f376 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Thu, 20 Jun 2024 21:57:19 +0200 Subject: [PATCH 217/313] create constants and add query functions --- .../src/syft/protocol/protocol_version.json | 4 +- .../src/syft/service/action/action_service.py | 12 ++++- .../syft/src/syft/service/policy/policy.py | 49 ++++++++++++++++--- 3 files changed, 53 insertions(+), 12 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 86b01e6244a..9687b1ac4ce 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -309,7 +309,7 @@ "CreatePolicyRuleConstant": { "1": { "version": 1, - "hash": "f3feeaeefa1a1e86528aba80426c9ae4b6c723c41095cc613499acf39075b1ed", + "hash": "51e9d217009b7eb4a1058b06533b18652b33556e145f90263cf58a0edb6de7dd", "action": "add" } }, @@ -323,7 +323,7 @@ "PreFill": { "1": { "version": 1, - "hash": "3f9018398a15b3207fd7b340e314adcce1bc64f40f1381f4b40412bdb061a301", + "hash": "090969ee54568265d192b17548dcc5d40faf5a55a02af331ff3d9e3cbf4eb958", "action": "add" } }, diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 98dff4ee608..f6b6651bb4b 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -7,6 +7,8 @@ from result import Err from result import Ok from result import Result +from syft.service.action.action_endpoint import CustomEndpointActionObject +from syft.service.api.api import TwinAPIEndpoint # relative from ...node.credentials import SyftVerifyKey @@ -339,7 +341,11 @@ def _user_code_execute( filtered_kwargs = result.ok() if hasattr(input_policy, "transform_kwargs"): - filtered_kwargs = input_policy.transform_kwargs(filtered_kwargs) + filtered_kwargs_res = input_policy.transform_kwargs(context, filtered_kwargs) + if filtered_kwargs_res.is_err(): + return filtered_kwargs_res + else: + filtered_kwargs = filtered_kwargs_res.ok() # update input policy to track any input state @@ -1003,9 +1009,11 @@ def filter_twin_kwargs( else: if isinstance(v, ActionObject): filtered[k] = v.syft_action_data - elif isinstance(v, str | int | float | dict) and allow_python_types: + elif isinstance(v, str | int | float | dict | CustomEndpointActionObject) and allow_python_types: filtered[k] = v else: + import ipdb + ipdb.set_trace() raise ValueError( f"unexepected value {v} passed to filtered twin kwargs" ) diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index bd4fd390f36..60f93a6a316 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -13,14 +13,16 @@ from io import StringIO import sys import types -from typing import Any +from typing import Any, Type # third party from RestrictedPython import compile_restricted +from pydantic import field_validator, model_validator import requests from result import Err from result import Ok from result import Result +from syft.service.action.action_endpoint import CustomEndpointActionObject # relative from ...abstract_node import NodeType @@ -207,9 +209,28 @@ class CreatePolicyRuleConstant(CreatePolicyRule): __version__ = SYFT_OBJECT_VERSION_1 val: Any + klass: None | Type = None + + @model_validator(mode='before') + @classmethod + def set_klass(cls, data: Any) 
-> Any: + val = data["val"] + if isinstance(val, RemoteFunction): + klass = CustomEndpointActionObject + else: + klass = type(val) + data["klass"]= klass + return data + + @field_validator("val", mode="after") + @classmethod + def idify_endpoints(cls, value: str) -> str: + if isinstance(value, RemoteFunction): + return value.custom_function_actionobject_id() + return value def to_policy_rule(self, kw): - return Constant(kw=kw, val=self.val) + return Constant(kw=kw, val=self.val, klass=self.klass) @serializable() @@ -231,13 +252,22 @@ class Constant(PolicyRule): __version__ = SYFT_OBJECT_VERSION_1 val: Any + klass: Type requires_input: bool = False def is_met(self, context: AuthedServiceContext, *args, **kwargs) -> bool: return True - def transform_kwarg(self, val): - return self.val + def transform_kwarg(self, context: AuthedServiceContext, val) -> Result[Any, str]: + if isinstance(self.val, UID): + if issubclass(self.klass, CustomEndpointActionObject): + res = context.node.get_service("actionservice").get(context, self.val) + if res.is_err(): + return res + else: + obj = res.ok() + return Ok(obj.syft_action_data) + return Ok(self.val) @serializable() @@ -405,13 +435,16 @@ def __init__( *args, kwarg_rules=kwarg_rules, init_kwargs=kwarg_rules, **kwargs ) - def transform_kwargs(self, kwargs: dict[str, Any]) -> dict[str, Any]: + def transform_kwargs(self, context: AuthedServiceContext, kwargs: dict[str, Any]) -> dict[str, Any]: for _, rules in self.kwarg_rules.items(): for kw, rule in rules.items(): if hasattr(rule, "transform_kwarg"): - val = rule.transform_kwarg(kwargs.get(kw, None)) - kwargs[kw] = val - return kwargs + res_val = rule.transform_kwarg(context, kwargs.get(kw, None)) + if res_val.is_err(): + return res_val + else: + kwargs[kw] = res_val.ok() + return Ok(kwargs) def find_node_identity(self, kwargs: dict[str, Any], client=None) -> NodeIdentity: if client is not None: From 07cb15c6f3876c2936f75fb7a97db037edf3c4b2 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Thu, 20 Jun 2024 21:58:13 +0200 Subject: [PATCH 218/313] nb --- notebooks/Bigquery full flow.ipynb | 682 +++++++++++++++++++++++++++-- 1 file changed, 642 insertions(+), 40 deletions(-) diff --git a/notebooks/Bigquery full flow.ipynb b/notebooks/Bigquery full flow.ipynb index dc63ecf8d52..7e6ee46af37 100644 --- a/notebooks/Bigquery full flow.ipynb +++ b/notebooks/Bigquery full flow.ipynb @@ -3,9 +3,7 @@ { "cell_type": "markdown", "id": "ce366797-29fa-4f38-af51-2b70cee8ce54", - "metadata": { - "jp-MarkdownHeadingCollapsed": true - }, + "metadata": {}, "source": [ "# Syncing helpers" ] @@ -83,6 +81,7 @@ " if api_func is None:\n", " continue\n", "\n", + " # job = api_func(endpoint=client_high.api.services.reddit.query, blocking=False)\n", " job = api_func(blocking=False)\n", " jobs_by_request_id[request_id] = job\n", " # sleep to prevent SQLite connection pool issues\n", @@ -161,6 +160,17 @@ { "cell_type": "code", "execution_count": 2, + "id": "555a0fbe-5736-4eb8-9d66-abf8fc639388", + "metadata": {}, + "outputs": [], + "source": [ + "from google.cloud import bigquery\n", + "from google.oauth2 import service_account" + ] + }, + { + "cell_type": "code", + "execution_count": 3, "id": "197db367-bc49-4a41-ba94-756e8b8b4bf4", "metadata": {}, "outputs": [ @@ -172,18 +182,20 @@ "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", "Action Store's SQLite DB path: 
/var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=1 image_uid=cddfb7f5ab354ddcb344f8716a0b29cb in_memory=True\n", + "Setting up worker poolname=default-pool workers=1 image_uid=7287ccb4c816433c83ab6cbc75993731 in_memory=True\n", "Created default worker pool.\n", "Data Migrated to latest version !!!\n", "Staging Protocol Changes...\n", "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=4 image_uid=0b456f4cf12a4d26a989163ccbd88624 in_memory=True\n", + "Setting up worker poolname=default-pool workers=4 image_uid=5d81c7c1395f4b76ac3da69e16fb80c3 in_memory=True\n", "Created default worker pool.\n", "Data Migrated to latest version !!!\n", "Logged into as GUEST\n", - "Logged into as GUEST\n" + "Logged into as GUEST\n", + "Logged into as GUEST\n", + "Logged into as GUEST\n" ] } ], @@ -211,7 +223,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "id": "01563270-ec4a-4eae-8f27-2a53d07610da", "metadata": {}, "outputs": [ @@ -273,12 +285,220 @@ "id": "af1bf1a3-5c0a-4895-bc91-ffedbb6e4e08", "metadata": {}, "source": [ - "# Create Syftfunction factory " + "# Create Query enpoints" + ] + }, + { + "cell_type": "markdown", + "id": "9613fc72-6ad0-4c85-87fa-92f58d4a40fc", + "metadata": {}, + "source": [ + "6. we are not limiting the result in size (IMPLEMENT)" ] }, { "cell_type": "code", "execution_count": 5, + "id": "3eb31229-93e8-46fe-ba0c-61d94d5d1910", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "with open(\"./credentials.json\", \"r\") as f:\n", + " BQ_CREDENTIALS = json.loads(f.read())" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "d1758200-c979-48df-b962-9f8f3c5737c0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess: Endpoint successfully created.
" + ], + "text/plain": [ + "SyftSuccess: Endpoint successfully created." + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Mock API\n", + "\n", + "@sy.api_endpoint_method(\n", + " settings={}\n", + ")\n", + "def mock_query_function(\n", + " context,\n", + " sql_query: str,\n", + ") -> str:\n", + " # third party\n", + " import numpy as np\n", + " import pandas as pd\n", + "\n", + " # syft absolute\n", + " from syft.service.response import SyftError\n", + "\n", + " # Set the seed for reproducibility\n", + " np.random.seed(42)\n", + " try:\n", + " # Generate mock data\n", + " data = {\n", + " \"Name\": [f\"Name_{i}\" for i in range(1, 11)],\n", + " \"Age\": np.random.randint(20, 50, size=10),\n", + " \"Email\": [f\"name_{i}@example.com\" for i in range(1, 11)],\n", + " \"JoinDate\": pd.date_range(start=\"2023-01-01\", periods=10, freq=\"M\")\n", + " .strftime(\"%Y-%m-%d\")\n", + " .tolist(),\n", + " \"Salary\": np.random.randint(40000, 120000, size=10),\n", + " }\n", + "\n", + " # Create DataFrame\n", + " return pd.DataFrame(data)\n", + " except Exception:\n", + " return SyftError(\n", + " message=\"Ops! Something went wrong. please, contact your admin\"\n", + " )\n", + "\n", + "\n", + "# Private API\n", + "@sy.api_endpoint_method(\n", + " settings=BQ_CREDENTIALS\n", + ")\n", + "def private_query_function(\n", + " context,\n", + " sql_query: str,\n", + ") -> str:\n", + " # third party\n", + " from google.cloud import bigquery\n", + " from google.oauth2 import service_account\n", + "\n", + " # syft absolute\n", + " from syft.service.response import SyftError\n", + "\n", + " # Client query\n", + " credentials = service_account.Credentials.from_service_account_info(context.settings)\n", + " scoped_credentials = credentials.with_scopes(['https://www.googleapis.com/auth/cloud-platform'])\n", + "\n", + " client = bigquery.Client(\n", + " credentials=scoped_credentials,\n", + " location=\"us-west1\", \n", + " )\n", + " # Generate mock data\n", + " rows = client.query_and_wait(\n", + " sql_query,\n", + " project=\"reddit-testing-415005\",\n", + " )\n", + " if rows.total_rows > 40000:\n", + " return SyftError(message=\"Please only write queries that gather aggregate statistics\")\n", + " # Create DataFrame\n", + " res = rows.to_dataframe()\n", + " return res\n", + "\n", + "\n", + "# Create new Twin API using bigquery-pool as a worker pool\n", + "new_endpoint = sy.TwinAPIEndpoint(\n", + " path=\"reddit.query\",\n", + " description=\"Ask SQL Queries using our BQ\",\n", + " private_function=private_query_function,\n", + " mock_function=mock_query_function,\n", + ")\n", + "\n", + "client_high.custom_api.add(endpoint=new_endpoint)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "f67f6643-727e-4c92-922a-04e58c7ac42c", + "metadata": {}, + "outputs": [], + "source": [ + "if False:\n", + " client_high.api.services.reddit.query.private(sql_query=\"SELECT * from data_10gb.comments LIMIT 40\").head()" + ] + }, + { + "cell_type": "markdown", + "id": "265933cc-3141-429f-863b-5c792de64736", + "metadata": {}, + "source": [ + "# Sync TwinAPI to LowSide" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "d076c2b5-f58c-4385-8cde-8b575631c3bc", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.
" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Decision: Syncing 1 objects\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftSuccess: Synced 1 items
" + ], + "text/plain": [ + "SyftSuccess: Synced 1 items" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "widget = compare_clients(from_client=client_high, to_client=client_low).resolve()\n", + "widget.click_sync(0)" + ] + }, + { + "cell_type": "markdown", + "id": "b85cbfbd-95c4-4ceb-8bf1-dd80b996d559", + "metadata": {}, + "source": [ + "# Create Function factory" + ] + }, + { + "cell_type": "code", + "execution_count": 8, "id": "2e3f599f-86ed-4cb8-ae7a-170f5fd59ef8", "metadata": {}, "outputs": [], @@ -292,10 +512,14 @@ " if not func_name.isalpha():\n", " return sy.SyftError(message=\"Please only use alphabetic characters for your func_name\")\n", "\n", - " @sy.syft_function(name=func_name, input_policy=sy.MixedInputPolicy(query=sy.Constant(val=query), client=context.admin_client))\n", - " def execute_query(query: str):\n", - " return f\"your query {query} was EXECUTED\"\n", - "\n", + " @sy.syft_function(name=func_name,\n", + " input_policy=sy.MixedInputPolicy(endpoint=sy.Constant(val=context.admin_client.api.services.reddit.query),\n", + " query=sy.Constant(val=query),\n", + " client=context.admin_client))\n", + " def execute_query(query: str, endpoint):\n", + " res = endpoint.private(sql_query=query);\n", + " return res\n", + " \n", " res = context.user_client.code.request_code_execution(execute_query)\n", "\n", " return f\"Query submitted {res}, use `client.code.{func_name}()` to run your query\"" @@ -303,7 +527,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 9, "id": "02c6287f-322e-469f-a2ae-666fc17c6dac", "metadata": {}, "outputs": [ @@ -316,7 +540,7 @@ "SyftSuccess: Endpoint successfully created." ] }, - "execution_count": 6, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -335,7 +559,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 10, "id": "027d4479-0f47-4647-9c00-7b7d87f6a80f", "metadata": {}, "outputs": [ @@ -354,13 +578,13 @@ ], "source": [ "submit_res = client_low_ds.api.services.reddit.submit_query(func_name=\"myquery\",\n", - " query=\"FROM ABC SELECT *\")" + " query=\"SELECT * from data_10gb.comments LIMIT 40\")" ] }, { "cell_type": "code", - "execution_count": 8, - "id": "0139b14a-b14e-4ee9-9219-7b5d18715938", + "execution_count": 11, + "id": "c19b0ff1-4b28-4e3c-99ca-3778e5ba52f6", "metadata": {}, "outputs": [ { @@ -376,7 +600,7 @@ "'Query submitted syft.service.request.request.Request, use `client.code.myquery()` to run your query'" ] }, - "execution_count": 8, + "execution_count": 11, "metadata": {}, "output_type": "execute_result" } @@ -387,17 +611,25 @@ }, { "cell_type": "code", - "execution_count": 10, - "id": "7041ad9f-542e-4fd7-b780-d64bbe8e93f1", + "execution_count": 13, + "id": "a7573008-bb4f-43b6-84da-bf797ec9dac6", "metadata": {}, "outputs": [], "source": [ "# client_low_ds.code.myquery()" ] }, + { + "cell_type": "markdown", + "id": "a35540cc-eb17-4a83-9fea-a6823573e404", + "metadata": {}, + "source": [ + "# Run Autosync" + ] + }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 14, "id": "e518ba33-9d2a-40a2-a6b8-174bc8392c77", "metadata": {}, "outputs": [ @@ -527,8 +759,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Decision: Syncing 5 objects\n", - "Sharing 1 new results\n", + "Sharing 0 new results\n", "Finished auto sync\n", "Starting auto sync\n" ] @@ -569,7 +800,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Sharing 0 new results\n", + "Decision: Syncing 5 
objects\n", + "Sharing 1 new results\n", "Finished auto sync\n" ] } @@ -581,18 +813,16 @@ ] }, { - "cell_type": "code", - "execution_count": 12, - "id": "d0a68fb0-bc9c-4b23-a931-b0bb7009e72e", + "cell_type": "markdown", + "id": "8a3b3ec5-2999-4665-a754-d43bef11b05d", "metadata": {}, - "outputs": [], "source": [ - "# client_high.jobs[0].logs()" + "# Run function as DS" ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 15, "id": "f92240e6-0c35-4cd3-b52d-a5fa3256e3f9", "metadata": {}, "outputs": [ @@ -615,40 +845,412 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 16, "id": "23050872-2f0b-4d23-9c1f-57ad74ac09d5", "metadata": {}, + "outputs": [], + "source": [ + "df = res.get()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "bb13bcf7-1e1b-4575-84ca-570f13a570c2", + "metadata": {}, "outputs": [ { "data": { - "text/markdown": [ + "text/html": [ + "
[HTML table output stripped during extraction: the removed cell output was a job-log table ('#' / 'Message' rows such as 'Subjob Iter 0', 'Subjob Iter 1'); the added output is the df.head() HTML table, whose content survives in the text/plain block below]
" ], "text/plain": [ - "Pointer:\n", - "'your query FROM ABC SELECT * was EXECUTED'" + " id post_id parent_id created_at \\\n", + "0 t1_jsrssaa t3_3eq9p3r t1_j0bm0qn 2020-02-05 13:15:44+00:00 \n", + "1 t1_z014wyn t3_mtoy3vi None 2020-02-05 13:15:44+00:00 \n", + "2 t1_8ttp66l t3_is0dk32 None 2020-02-05 13:15:44+00:00 \n", + "3 t1_qhuklsm t3_7ajgpje None 2020-02-05 13:15:44+00:00 \n", + "4 t1_8nkh2zb t3_oygwavx t1_0mzt6bq 2020-02-05 13:15:44+00:00 \n", + "\n", + " last_modified_at body \\\n", + "0 NaT WASHINGTON (AP) — The federal government groun... \n", + "1 NaT He was indicted on 16 felony charges, includin... \n", + "2 NaT None \n", + "3 NaT These nachos are so sinful; it's hard to stop ... \n", + "4 NaT When we last checked in with Charles Platkin, ... \n", + "\n", + " author_id gilded score upvote_ratio deleted \\\n", + "0 t2_31y14bfh False 3 0.65 False \n", + "1 t2_iemo2ikg False 8 1.00 False \n", + "2 t2_csenfqwl False 6 1.00 False \n", + "3 t2_2ztp96r7 False 7 0.69 False \n", + "4 t2_o79jr0e0 False 5 1.00 True \n", + "\n", + " collapsed_in_crowd_control spam subreddit_id \\\n", + "0 False False t5_7i2tp \n", + "1 False False t5_xg19m \n", + "2 False False t5_unjsw \n", + "3 False False t5_91cqb \n", + "4 False False t5_y71mw \n", + "\n", + " permalink \n", + "0 /r/t5_7i2tp/comments/eq9p3r/comment/jsrssaa \n", + "1 /r/t5_xg19m/comments/mtoy3vi/comment/z014wyn \n", + "2 /r/t5_unjsw/comments/is0dk32/comment/8ttp66l \n", + "3 /r/t5_91cqb/comments/7ajgpje/comment/qhuklsm \n", + "4 /r/t5_y71mw/comments/oygwavx/comment/8nkh2zb " ] }, - "execution_count": 14, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "res" + "df.head()" ] }, { "cell_type": "code", "execution_count": null, - "id": "d41b8deb-0e15-4ab5-823c-751f99e491f8", + "id": "024d9969-3345-41a6-b904-9a04ab4bfdad", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2d851f1e-75af-4d0f-8523-f30e2d0d9951", "metadata": {}, "outputs": [], "source": [] }, + { + "cell_type": "code", + "execution_count": null, + "id": "14065b63-0102-4519-9749-dbf05c91c22b", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "77ff0fc4-243a-4d2e-acb4-cd29570855d1", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dd79cf84-9ac2-41ce-ae10-862754917be1", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12604e72-5229-4dfc-8b55-802ef335d356", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "85ce2d7c-cd95-497b-b39d-6e72f3d808a2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9dc3f93f-1126-43b6-86be-a7b93b5a4af4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b5c37a54-1a62-4390-a348-eee7ef94c1af", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "9c402c70-63d6-4835-bcaa-eed6ebfd2d65", + "metadata": { + "jp-MarkdownHeadingCollapsed": true + }, + "source": [ + "# Appendix" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9a12184b-9bc0-438f-a55f-2efa2ed6c93a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": 
"bfebf0d6-3e1e-445a-883c-3c95dba0a558", + "metadata": {}, + "outputs": [], + "source": [ + "# @sy.syft_function(input_policy=sy.MixedInputPolicy(endpoint=client_high.api.services.reddit.query, client=client_high))\n", + "# def execute_query(endpoint):\n", + "# res = endpoint(sql_query=\"SELECT * from data_10gb.comments LIMIT 40\");\n", + "# return res\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "8ecee14c-fa09-426c-a0f5-4a5e9904e3d7", + "metadata": {}, + "outputs": [], + "source": [ + "# client_high.code.submit(execute_query)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "bf902d3b-4bfc-470f-9b47-b7a195e45fe4", + "metadata": {}, + "outputs": [], + "source": [ + "# res = client_high.code.execute_query(endpoint=client_high.api.services.reddit.query)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "abfd38f6-1dfb-4a36-83b3-74bbbf03cfbe", + "metadata": {}, + "outputs": [], + "source": [ + "# res" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "8945ad4a-2106-45a9-b6eb-dfe9b35c3b0a", + "metadata": {}, + "outputs": [], + "source": [ + "# @sy.syft_function(input_policy=sy.ExactMatch(endpoint=client_high.api.services.reddit.query))\n", + "# def execute_query(endpoint):\n", + "# res = endpoint(sql_query=\"SELECT * from data_10gb.comments LIMIT 40\");\n", + "# return res\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "6c99de9f-7aed-430f-a81b-7215d4fe9270", + "metadata": {}, + "outputs": [], + "source": [ + "# client_high.code.submit(abc)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "e1759c17-e1e4-4061-9fe2-0ed757fb7a97", + "metadata": {}, + "outputs": [], + "source": [ + "# res = client_high.code.abc(endpoint=client_high.api.services.reddit.query)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "520377d1-5774-43cd-bb4c-35bd925fdb89", + "metadata": {}, + "outputs": [], + "source": [ + "# res.syft_action_data.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "e71aa8c6-d827-485e-bcf2-218f634346fc", + "metadata": {}, + "outputs": [], + "source": [ + "# @sy.api_endpoint(path=\"reddit.submit_query\")\n", + "# def submit_query(\n", + "# context, func_name: str, query: str,\n", + "# ) -> str:\n", + "# import syft as sy\n", + "\n", + "# if not func_name.isalpha():\n", + "# return sy.SyftError(message=\"Please only use alphabetic characters for your func_name\")\n", + "\n", + "# @sy.syft_function(name=func_name, input_policy=sy.MixedInputPolicy(query=sy.Constant(val=query),client=context.admin_client)\n", + "# def execute_query(query: str):\n", + " \n", + "# return f\"your query {query} was EXECUTED\"\n", + "\n", + "# res = context.user_client.code.request_code_execution(execute_query)\n", + "\n", + "# return f\"Query submitted {res}, use `client.code.{func_name}()` to run your query\"" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "bc8c4bb4-f41f-4344-8817-41c4e6780912", + "metadata": {}, + "outputs": [], + "source": [ + "# sy.api_endpoint(path=\"reddit.query\"" + ] + }, { "cell_type": "code", "execution_count": 1, From e82b7233e2e11f8a1f676ee4579ed0cce4b7422d Mon Sep 17 00:00:00 2001 From: Julian Cardonnet Date: Thu, 20 Jun 2024 18:30:16 -0300 Subject: [PATCH 219/313] Use /metadata endpoint as LivenessProbe. Remove initialDelaySeconds attribute. 
--- packages/syft/src/syft/custom_worker/runner_k8s.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/syft/src/syft/custom_worker/runner_k8s.py b/packages/syft/src/syft/custom_worker/runner_k8s.py index edd7e5892bf..ddb9765042c 100644 --- a/packages/syft/src/syft/custom_worker/runner_k8s.py +++ b/packages/syft/src/syft/custom_worker/runner_k8s.py @@ -232,10 +232,9 @@ def _create_stateful_set( "volumeMounts": volume_mounts, "livenessProbe": { "httpGet": { - "path": "/api/v2/?probe=livenessProbe", + "path": "/api/v2/metadata?probe=livenessProbe", "port": 80, }, - "initialDelaySeconds": 30, "periodSeconds": 15, "timeoutSeconds": 5, "failureThreshold": 3, From e485e54375293efb88e389ebcbc78b1997665898 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Wed, 19 Jun 2024 22:08:10 +0200 Subject: [PATCH 220/313] feat: add init sanitizer --- packages/syft/src/syft/__init__.py | 67 +++++++++++++++++++ .../components/tabulator_template.py | 2 +- packages/syft/src/syft/util/table.py | 5 +- 3 files changed, 72 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index e442a5302d9..e3ecc2e472c 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -4,10 +4,13 @@ from collections.abc import Callable import pathlib from pathlib import Path +import re import sys from types import MethodType from typing import Any +from syft.types.dicttuple import DictTuple + # relative from .abstract_node import NodeSideType # noqa: F401 from .abstract_node import NodeType # noqa: F401 @@ -95,9 +98,73 @@ logger.start() +def _patch_ipython_sanitization() -> None: + from IPython import get_ipython + from IPython.display import display_html, display_markdown + + ip = get_ipython() + if ip is None: + return + + from importlib import resources + import nh3 + from .util.notebook_ui.styles import FONT_CSS, ITABLES_CSS, JS_DOWNLOAD_FONTS, CSS_CODE + from .util.assets import load_js, load_css + + tabulator_js = load_js('tabulator.min.js') + tabulator_js = tabulator_js.replace( + "define(t)", "define('tabulator-tables', [], t)" + ) + + SKIP_SANITIZE = [ + FONT_CSS, + ITABLES_CSS, + CSS_CODE, + JS_DOWNLOAD_FONTS, + tabulator_js, + load_css("tabulator_pysyft.min.css"), + load_js("table.js"), + ] + + css_reinsert = f""" + + +{JS_DOWNLOAD_FONTS} +{CSS_CODE} +""" + + escaped_js_css = re.compile("|".join(re.escape(substr) for substr in SKIP_SANITIZE), re.IGNORECASE | re.MULTILINE) + + table_template = resources.files('syft.assets.jinja').joinpath('table.jinja2').read_text() + table_template = table_template.strip() + table_template = re.sub(r'\\{\\{.*?\\}\\}', '.*?', re.escape(table_template)) + escaped_template = re.compile(table_template, re.DOTALL | re.VERBOSE) + + def display_sanitized_html(obj) -> None: + if hasattr(obj, "_repr_html_"): + _str = obj._repr_html_() + matching_template = escaped_template.findall(_str) + print("matching_template") + _str = escaped_template.sub('', _str) + _str = escaped_js_css.sub('', _str) + _str = nh3.clean(_str) + return f"{css_reinsert} {_str} {"\n".join(matching_template)}" + + def display_sanitized_md(obj) -> None: + if hasattr(obj, "_repr_markdown_"): + return nh3.clean(obj._repr_markdown_()) + + ip.display_formatter.formatters['text/html'].for_type(SyftObject, display_sanitized_html) + ip.display_formatter.formatters['text/html'].for_type(DictTuple, display_sanitized_html) + ip.display_formatter.formatters['text/markdown'].for_type(SyftObject, display_sanitized_md) + 
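+# the three registrations above replace IPython's default rich display for
+# SyftObject and DictTuple: every _repr_html_ / _repr_markdown_ result is run
+# through nh3.clean, with the known-safe syft CSS/JS assets re-inserted around it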
+_patch_ipython_sanitization() + + def _patch_ipython_autocompletion() -> None: try: # third party + from IPython import get_ipython from IPython.core.guarded_eval import EVALUATION_POLICIES except ImportError: return diff --git a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py index 676dbe3151e..eabaf8c2a70 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py +++ b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py @@ -131,7 +131,7 @@ def build_tabulator_table( uid=uid, columns=json.dumps(column_data), row_header=json.dumps(row_header), - data=nh3.clean(json.dumps(table_data)), + data=(json.dumps(table_data)), css=css, js=js, index_field_name=TABLE_INDEX_KEY, diff --git a/packages/syft/src/syft/util/table.py b/packages/syft/src/syft/util/table.py index cf05e5f7e45..8deecf0190b 100644 --- a/packages/syft/src/syft/util/table.py +++ b/packages/syft/src/syft/util/table.py @@ -90,7 +90,10 @@ def _create_table_rows( if "id" in ret_val: del ret_val["id"] for key in ret_val.keys(): - cols[key].append(nh3.clean(ret_val[key])) + # if isinstance(ret_val[key], str): + # cols[key].append(nh3.clean(ret_val[key])) + # else: + cols[key].append(ret_val[key]) else: for field in extra_fields: value = item From 3437e2f6f4218aebc036a309dd88cd5163b69e42 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Thu, 20 Jun 2024 21:02:35 +0200 Subject: [PATCH 221/313] fix: APISubModulesView now uses the new table --- packages/syft/src/syft/client/api.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index c4a3a1b40a9..67d5fb1f21e 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -23,6 +23,7 @@ from pydantic import TypeAdapter from result import OkErr from result import Result +from syft.util.notebook_ui.components.tabulator_template import build_tabulator_table, show_table from typeguard import check_type # relative @@ -730,9 +731,9 @@ def recursively_get_submodules( APISubModulesView(submodule=submodule_name, endpoints=child_paths) ) - return list_dict_repr_html(views) - # return NotImplementedError - + return build_tabulator_table(views) + + # should never happen? 
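    # (defensive fallback: render the full endpoint table via its own HTML repr)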
results = self.get_all() return results._repr_html_() From 95cc27e438ea3297ba053cbdbbfb72e0f29243cc Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Thu, 20 Jun 2024 23:35:13 +0200 Subject: [PATCH 222/313] fix: sanitize but keep id and type intact --- .../syft/util/notebook_ui/components/tabulator_template.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py index eabaf8c2a70..b7b07277181 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py +++ b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py @@ -69,7 +69,7 @@ def format_dict(data: Any) -> str: return data if set(data.keys()) != {"type", "value"}: - return str(data) + return nh3.clean(str(data)) if "badge" in data["type"]: return Badge(value=data["value"], badge_class=data["type"]).to_html() @@ -87,7 +87,7 @@ def format_table_data(table_data: list[dict[str, Any]]) -> list[dict[str, str]]: row_formatted: dict[str, str] = {} for k, v in row.items(): if isinstance(v, str): - row_formatted[k] = v.replace("\n", "
") + row_formatted[k] = nh3.clean(v.replace("\n", "
")) continue v_formatted = format_dict(v) row_formatted[k] = v_formatted @@ -131,7 +131,7 @@ def build_tabulator_table( uid=uid, columns=json.dumps(column_data), row_header=json.dumps(row_header), - data=(json.dumps(table_data)), + data=json.dumps(table_data), css=css, js=js, index_field_name=TABLE_INDEX_KEY, From bf1d42c4af2f6f9a32c2799490c0b827727fef21 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Thu, 20 Jun 2024 23:39:34 +0200 Subject: [PATCH 223/313] fix: move ipython patches to its own file --- packages/syft/src/syft/__init__.py | 147 +---------- .../syft/src/syft/util/ipython_patches.py | 231 ++++++++++++++++++ 2 files changed, 233 insertions(+), 145 deletions(-) create mode 100644 packages/syft/src/syft/util/ipython_patches.py diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index e3ecc2e472c..f6f0db9a929 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -10,6 +10,7 @@ from typing import Any from syft.types.dicttuple import DictTuple +from syft.util.ipython_patches import patch_ipython # relative from .abstract_node import NodeSideType # noqa: F401 @@ -97,151 +98,7 @@ logger.start() - -def _patch_ipython_sanitization() -> None: - from IPython import get_ipython - from IPython.display import display_html, display_markdown - - ip = get_ipython() - if ip is None: - return - - from importlib import resources - import nh3 - from .util.notebook_ui.styles import FONT_CSS, ITABLES_CSS, JS_DOWNLOAD_FONTS, CSS_CODE - from .util.assets import load_js, load_css - - tabulator_js = load_js('tabulator.min.js') - tabulator_js = tabulator_js.replace( - "define(t)", "define('tabulator-tables', [], t)" - ) - - SKIP_SANITIZE = [ - FONT_CSS, - ITABLES_CSS, - CSS_CODE, - JS_DOWNLOAD_FONTS, - tabulator_js, - load_css("tabulator_pysyft.min.css"), - load_js("table.js"), - ] - - css_reinsert = f""" - - -{JS_DOWNLOAD_FONTS} -{CSS_CODE} -""" - - escaped_js_css = re.compile("|".join(re.escape(substr) for substr in SKIP_SANITIZE), re.IGNORECASE | re.MULTILINE) - - table_template = resources.files('syft.assets.jinja').joinpath('table.jinja2').read_text() - table_template = table_template.strip() - table_template = re.sub(r'\\{\\{.*?\\}\\}', '.*?', re.escape(table_template)) - escaped_template = re.compile(table_template, re.DOTALL | re.VERBOSE) - - def display_sanitized_html(obj) -> None: - if hasattr(obj, "_repr_html_"): - _str = obj._repr_html_() - matching_template = escaped_template.findall(_str) - print("matching_template") - _str = escaped_template.sub('', _str) - _str = escaped_js_css.sub('', _str) - _str = nh3.clean(_str) - return f"{css_reinsert} {_str} {"\n".join(matching_template)}" - - def display_sanitized_md(obj) -> None: - if hasattr(obj, "_repr_markdown_"): - return nh3.clean(obj._repr_markdown_()) - - ip.display_formatter.formatters['text/html'].for_type(SyftObject, display_sanitized_html) - ip.display_formatter.formatters['text/html'].for_type(DictTuple, display_sanitized_html) - ip.display_formatter.formatters['text/markdown'].for_type(SyftObject, display_sanitized_md) - -_patch_ipython_sanitization() - - -def _patch_ipython_autocompletion() -> None: - try: - # third party - from IPython import get_ipython - from IPython.core.guarded_eval import EVALUATION_POLICIES - except ImportError: - return - - ipython = get_ipython() - if ipython is None: - return - - try: - # this allows property getters to be used in nested autocomplete - ipython.Completer.evaluation = "limited" - ipython.Completer.use_jedi = False 
- policy = EVALUATION_POLICIES["limited"] - - policy.allowed_getattr_external.update( - [ - ("syft.client.api", "APIModule"), - ("syft.client.api", "SyftAPI"), - ] - ) - original_can_get_attr = policy.can_get_attr - - def patched_can_get_attr(value: Any, attr: str) -> bool: - attr_name = "__syft_allow_autocomplete__" - # first check if exist to prevent side effects - if hasattr(value, attr_name) and attr in getattr(value, attr_name, []): - if attr in dir(value): - return True - else: - return False - else: - return original_can_get_attr(value, attr) - - policy.can_get_attr = patched_can_get_attr - except Exception: - print("Failed to patch ipython autocompletion for syft property getters") - - try: - # this constraints the completions for autocomplete. - # if __syft_dir__ is defined we only autocomplete those properties - # stdlib - import re - - original_attr_matches = ipython.Completer.attr_matches - - def patched_attr_matches(self, text: str) -> list[str]: # type: ignore - res = original_attr_matches(text) - m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer) - if not m2: - return res - expr, _ = m2.group(1, 2) - obj = self._evaluate_expr(expr) - if isinstance(obj, SyftObject) and hasattr(obj, "__syft_dir__"): - # here we filter all autocomplete results to only contain those - # defined in __syft_dir__, however the original autocomplete prefixes - # have the full path, while __syft_dir__ only defines the attr - attrs = set(obj.__syft_dir__()) - new_res = [] - for r in res: - splitted = r.split(".") - if len(splitted) > 1: - attr_name = splitted[-1] - if attr_name in attrs: - new_res.append(r) - return new_res - else: - return res - - ipython.Completer.attr_matches = MethodType( - patched_attr_matches, ipython.Completer - ) - except Exception: - print("Failed to patch syft autocompletion for __syft_dir__") - - -_patch_ipython_autocompletion() - +patch_ipython() def module_property(func: Any) -> Callable: """Decorator to turn module functions into properties. 
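The autocompletion logic removed above reappears in the new
util/ipython_patches.py file below. It implements an opt-in protocol: a class
lists the attributes that IPython's "limited" evaluation policy may evaluate in
__syft_allow_autocomplete__, and a SyftObject can narrow the offered
completions further with __syft_dir__. A minimal sketch of a class taking part
in the protocol (the class and attribute names are illustrative, and the
__syft_dir__ filter only kicks in for SyftObject instances):

    class ExampleModule:
        # attributes the patched can_get_attr lets IPython evaluate safely
        __syft_allow_autocomplete__ = ["datasets", "jobs"]

        def __syft_dir__(self) -> list[str]:
            # the patched attr_matches keeps only these completions
            return ["datasets", "jobs"]

        @property
        def datasets(self) -> list[str]:
            return ["trade", "census"]

        @property
        def jobs(self) -> list[str]:
            return []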
diff --git a/packages/syft/src/syft/util/ipython_patches.py b/packages/syft/src/syft/util/ipython_patches.py new file mode 100644 index 00000000000..e4ce3876aee --- /dev/null +++ b/packages/syft/src/syft/util/ipython_patches.py @@ -0,0 +1,231 @@ +# stdlib +from collections.abc import Callable +import pathlib +from pathlib import Path +import re +import sys +from types import MethodType +from typing import Any + +from syft.types.dicttuple import DictTuple + +# relative +from .abstract_node import NodeSideType # noqa: F401 +from .abstract_node import NodeType # noqa: F401 +from .client.client import connect # noqa: F401 +from .client.client import login # noqa: F401 +from .client.client import login_as_guest # noqa: F401 +from .client.client import register # noqa: F401 +from .client.domain_client import DomainClient # noqa: F401 +from .client.gateway_client import GatewayClient # noqa: F401 +from .client.registry import DomainRegistry # noqa: F401 +from .client.registry import EnclaveRegistry # noqa: F401 +from .client.registry import NetworkRegistry # noqa: F401 +from .client.search import Search # noqa: F401 +from .client.search import SearchResults # noqa: F401 +from .client.user_settings import UserSettings # noqa: F401 +from .client.user_settings import settings # noqa: F401 +from .custom_worker.config import DockerWorkerConfig # noqa: F401 +from .custom_worker.config import PrebuiltWorkerConfig # noqa: F401 +from .node.credentials import SyftSigningKey # noqa: F401 +from .node.domain import Domain # noqa: F401 +from .node.enclave import Enclave # noqa: F401 +from .node.gateway import Gateway # noqa: F401 +from .node.server import serve_node # noqa: F401 +from .node.server import serve_node as bind_worker # noqa: F401 +from .node.worker import Worker # noqa: F401 +from .orchestra import Orchestra as orchestra # noqa: F401 +from .protocol.data_protocol import bump_protocol_version # noqa: F401 +from .protocol.data_protocol import check_or_stage_protocol # noqa: F401 +from .protocol.data_protocol import get_data_protocol # noqa: F401 +from .protocol.data_protocol import stage_protocol_changes # noqa: F401 +from .serde import NOTHING # noqa: F401 +from .serde.deserialize import _deserialize as deserialize # noqa: F401 +from .serde.serializable import serializable # noqa: F401 +from .serde.serialize import _serialize as serialize # noqa: F401 +from .service.action.action_data_empty import ActionDataEmpty # noqa: F401 +from .service.action.action_object import ActionObject # noqa: F401 +from .service.action.plan import Plan # noqa: F401 +from .service.action.plan import planify # noqa: F401 +from .service.api.api import api_endpoint # noqa: F401 +from .service.api.api import api_endpoint_method # noqa: F401 +from .service.api.api import create_new_api_endpoint as TwinAPIEndpoint # noqa: F401 +from .service.code.user_code import UserCodeStatus # noqa: F401; noqa: F401 +from .service.code.user_code import syft_function # noqa: F401; noqa: F401 +from .service.code.user_code import syft_function_single_use # noqa: F401; noqa: F401 +from .service.data_subject import DataSubjectCreate as DataSubject # noqa: F401 +from .service.dataset.dataset import Contributor # noqa: F401 +from .service.dataset.dataset import CreateAsset as Asset # noqa: F401 +from .service.dataset.dataset import CreateDataset as Dataset # noqa: F401 +from .service.notification.notifications import NotificationStatus # noqa: F401 +from .service.policy.policy import CustomInputPolicy # noqa: F401 +from .service.policy.policy 
import CustomOutputPolicy # noqa: F401 +from .service.policy.policy import ExactMatch # noqa: F401 +from .service.policy.policy import SingleExecutionExactOutput # noqa: F401 +from .service.policy.policy import UserInputPolicy # noqa: F401 +from .service.policy.policy import UserOutputPolicy # noqa: F401 +from .service.project.project import ProjectSubmit as Project # noqa: F401 +from .service.request.request import SubmitRequest as Request # noqa: F401 +from .service.response import SyftError # noqa: F401 +from .service.response import SyftNotReady # noqa: F401 +from .service.response import SyftSuccess # noqa: F401 +from .service.user.roles import Roles as roles # noqa: F401 +from .service.user.user_service import UserService # noqa: F401 +from .stable_version import LATEST_STABLE_SYFT +from .types.syft_object import SyftObject +from .types.twin_object import TwinObject # noqa: F401 +from .types.uid import UID # noqa: F401 +from .util import filterwarnings # noqa: F401 +from .util import logger # noqa: F401 +from .util import options # noqa: F401 +from .util.autoreload import disable_autoreload # noqa: F401 +from .util.autoreload import enable_autoreload # noqa: F401 +from .util.telemetry import instrument # noqa: F401 +from .util.util import autocache # noqa: F401 +from .util.util import get_root_data_path # noqa: F401 +from .util.version_compare import make_requires + +def _patch_ipython_sanitization() -> None: + try: + from IPython import get_ipython + from IPython.display import display_html, display_markdown + except ImportError: + return + + ip = get_ipython() + if ip is None: + return + + from importlib import resources + import nh3 + from .util.notebook_ui.styles import FONT_CSS, ITABLES_CSS, JS_DOWNLOAD_FONTS, CSS_CODE + from .util.assets import load_js, load_css + + tabulator_js = load_js('tabulator.min.js') + tabulator_js = tabulator_js.replace( + "define(t)", "define('tabulator-tables', [], t)" + ) + + SKIP_SANITIZE = [ + FONT_CSS, + ITABLES_CSS, + CSS_CODE, + JS_DOWNLOAD_FONTS, + tabulator_js, + load_css("tabulator_pysyft.min.css"), + load_js("table.js"), + ] + + css_reinsert = f""" + + +{JS_DOWNLOAD_FONTS} +{CSS_CODE} +""" + + escaped_js_css = re.compile("|".join(re.escape(substr) for substr in SKIP_SANITIZE), re.IGNORECASE | re.MULTILINE) + + table_template = resources.files('syft.assets.jinja').joinpath('table.jinja2').read_text() + table_template = table_template.strip() + table_template = re.sub(r'\\{\\{.*?\\}\\}', '.*?', re.escape(table_template)) + escaped_template = re.compile(table_template, re.DOTALL | re.VERBOSE) + + def display_sanitized_html(obj) -> str | None: + if hasattr(obj, "_repr_html_") and callable(obj._repr_html_): + _str = obj._repr_html_() + matching_template = escaped_template.findall(_str) + _str = escaped_template.sub('', _str) + _str = escaped_js_css.sub('', _str) + _str = nh3.clean(_str) + return f"{css_reinsert} {_str} {"\n".join(matching_template)}" + + def display_sanitized_md(obj) -> None: + if hasattr(obj, "_repr_markdown_"): + return nh3.clean(obj._repr_markdown_()) + + ip.display_formatter.formatters['text/html'].for_type(SyftObject, display_sanitized_html) + ip.display_formatter.formatters['text/html'].for_type(DictTuple, display_sanitized_html) + ip.display_formatter.formatters['text/markdown'].for_type(SyftObject, display_sanitized_md) + + +def _patch_ipython_autocompletion() -> None: + try: + # third party + from IPython import get_ipython + from IPython.core.guarded_eval import EVALUATION_POLICIES + except ImportError: + return + + 
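+    # get_ipython() returns None when syft is imported outside an interactive
+    # IPython session; in that case there is nothing to patch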
ipython = get_ipython() + if ipython is None: + return + + try: + # this allows property getters to be used in nested autocomplete + ipython.Completer.evaluation = "limited" + ipython.Completer.use_jedi = False + policy = EVALUATION_POLICIES["limited"] + + policy.allowed_getattr_external.update( + [ + ("syft.client.api", "APIModule"), + ("syft.client.api", "SyftAPI"), + ] + ) + original_can_get_attr = policy.can_get_attr + + def patched_can_get_attr(value: Any, attr: str) -> bool: + attr_name = "__syft_allow_autocomplete__" + # first check if exist to prevent side effects + if hasattr(value, attr_name) and attr in getattr(value, attr_name, []): + if attr in dir(value): + return True + else: + return False + else: + return original_can_get_attr(value, attr) + + policy.can_get_attr = patched_can_get_attr + except Exception: + print("Failed to patch ipython autocompletion for syft property getters") + + try: + # this constraints the completions for autocomplete. + # if __syft_dir__ is defined we only autocomplete those properties + original_attr_matches = ipython.Completer.attr_matches + + def patched_attr_matches(self, text: str) -> list[str]: # type: ignore + res = original_attr_matches(text) + m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer) + if not m2: + return res + expr, _ = m2.group(1, 2) + obj = self._evaluate_expr(expr) + if isinstance(obj, SyftObject) and hasattr(obj, "__syft_dir__"): + # here we filter all autocomplete results to only contain those + # defined in __syft_dir__, however the original autocomplete prefixes + # have the full path, while __syft_dir__ only defines the attr + attrs = set(obj.__syft_dir__()) + new_res = [] + for r in res: + splitted = r.split(".") + if len(splitted) > 1: + attr_name = splitted[-1] + if attr_name in attrs: + new_res.append(r) + return new_res + else: + return res + + ipython.Completer.attr_matches = MethodType( + patched_attr_matches, ipython.Completer + ) + except Exception: + print("Failed to patch syft autocompletion for __syft_dir__") + + +def patch_ipython(): + _patch_ipython_sanitization() + _patch_ipython_autocompletion() + + From cd2be2d37e9be6198e3a3653fb53abc313e0a761 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Thu, 20 Jun 2024 23:44:47 +0200 Subject: [PATCH 224/313] fix: more sanitization --- packages/syft/src/syft/__init__.py | 8 +-- packages/syft/src/syft/client/api.py | 5 +- .../syft/src/syft/util/ipython_patches.py | 67 +++++++++++-------- packages/syft/src/syft/util/table.py | 3 - 4 files changed, 44 insertions(+), 39 deletions(-) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index f6f0db9a929..f4b28443e40 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -4,14 +4,9 @@ from collections.abc import Callable import pathlib from pathlib import Path -import re import sys -from types import MethodType from typing import Any -from syft.types.dicttuple import DictTuple -from syft.util.ipython_patches import patch_ipython - # relative from .abstract_node import NodeSideType # noqa: F401 from .abstract_node import NodeType # noqa: F401 @@ -75,7 +70,6 @@ from .service.user.roles import Roles as roles # noqa: F401 from .service.user.user_service import UserService # noqa: F401 from .stable_version import LATEST_STABLE_SYFT -from .types.syft_object import SyftObject from .types.twin_object import TwinObject # noqa: F401 from .types.uid import UID # noqa: F401 from .util import filterwarnings # noqa: F401 @@ -83,6 +77,7 @@ from .util import 
options # noqa: F401 from .util.autoreload import disable_autoreload # noqa: F401 from .util.autoreload import enable_autoreload # noqa: F401 +from .util.ipython_patches import patch_ipython from .util.telemetry import instrument # noqa: F401 from .util.util import autocache # noqa: F401 from .util.util import get_root_data_path # noqa: F401 @@ -100,6 +95,7 @@ patch_ipython() + def module_property(func: Any) -> Callable: """Decorator to turn module functions into properties. Function names must be prefixed with an underscore.""" diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 67d5fb1f21e..6622ed3e81d 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -23,7 +23,6 @@ from pydantic import TypeAdapter from result import OkErr from result import Result -from syft.util.notebook_ui.components.tabulator_template import build_tabulator_table, show_table from typeguard import check_type # relative @@ -63,7 +62,7 @@ from ..types.uid import UID from ..util.autoreload import autoreload_enabled from ..util.markdown import as_markdown_python_code -from ..util.table import list_dict_repr_html +from ..util.notebook_ui.components.tabulator_template import build_tabulator_table from ..util.telemetry import instrument from ..util.util import prompt_warning_message from .connection import NodeConnection @@ -732,7 +731,7 @@ def recursively_get_submodules( ) return build_tabulator_table(views) - + # should never happen? results = self.get_all() return results._repr_html_() diff --git a/packages/syft/src/syft/util/ipython_patches.py b/packages/syft/src/syft/util/ipython_patches.py index e4ce3876aee..ab8e0a354d9 100644 --- a/packages/syft/src/syft/util/ipython_patches.py +++ b/packages/syft/src/syft/util/ipython_patches.py @@ -1,15 +1,10 @@ # stdlib -from collections.abc import Callable -import pathlib -from pathlib import Path import re -import sys from types import MethodType from typing import Any -from syft.types.dicttuple import DictTuple - # relative +from ..types.dicttuple import DictTuple from .abstract_node import NodeSideType # noqa: F401 from .abstract_node import NodeType # noqa: F401 from .client.client import connect # noqa: F401 @@ -71,7 +66,6 @@ from .service.response import SyftSuccess # noqa: F401 from .service.user.roles import Roles as roles # noqa: F401 from .service.user.user_service import UserService # noqa: F401 -from .stable_version import LATEST_STABLE_SYFT from .types.syft_object import SyftObject from .types.twin_object import TwinObject # noqa: F401 from .types.uid import UID # noqa: F401 @@ -83,12 +77,12 @@ from .util.telemetry import instrument # noqa: F401 from .util.util import autocache # noqa: F401 from .util.util import get_root_data_path # noqa: F401 -from .util.version_compare import make_requires + def _patch_ipython_sanitization() -> None: try: + # third party from IPython import get_ipython - from IPython.display import display_html, display_markdown except ImportError: return @@ -96,12 +90,21 @@ def _patch_ipython_sanitization() -> None: if ip is None: return + # stdlib from importlib import resources + + # third party import nh3 - from .util.notebook_ui.styles import FONT_CSS, ITABLES_CSS, JS_DOWNLOAD_FONTS, CSS_CODE - from .util.assets import load_js, load_css - tabulator_js = load_js('tabulator.min.js') + # relative + from .util.assets import load_css + from .util.assets import load_js + from .util.notebook_ui.styles import CSS_CODE + from .util.notebook_ui.styles import 
FONT_CSS + from .util.notebook_ui.styles import ITABLES_CSS + from .util.notebook_ui.styles import JS_DOWNLOAD_FONTS + + tabulator_js = load_js("tabulator.min.js") tabulator_js = tabulator_js.replace( "define(t)", "define('tabulator-tables', [], t)" ) @@ -123,29 +126,41 @@ def _patch_ipython_sanitization() -> None: {CSS_CODE} """ - escaped_js_css = re.compile("|".join(re.escape(substr) for substr in SKIP_SANITIZE), re.IGNORECASE | re.MULTILINE) + escaped_js_css = re.compile( + "|".join(re.escape(substr) for substr in SKIP_SANITIZE), + re.IGNORECASE | re.MULTILINE, + ) - table_template = resources.files('syft.assets.jinja').joinpath('table.jinja2').read_text() + table_template = ( + resources.files("syft.assets.jinja").joinpath("table.jinja2").read_text() + ) table_template = table_template.strip() - table_template = re.sub(r'\\{\\{.*?\\}\\}', '.*?', re.escape(table_template)) + table_template = re.sub(r"\\{\\{.*?\\}\\}", ".*?", re.escape(table_template)) escaped_template = re.compile(table_template, re.DOTALL | re.VERBOSE) - def display_sanitized_html(obj) -> str | None: - if hasattr(obj, "_repr_html_") and callable(obj._repr_html_): - _str = obj._repr_html_() + def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: + if hasattr(obj, "_repr_html_") and callable(obj._repr_html_): # type: ignore + _str = obj._repr_html_() # type: ignore matching_template = escaped_template.findall(_str) - _str = escaped_template.sub('', _str) - _str = escaped_js_css.sub('', _str) + _str = escaped_template.sub("", _str) + _str = escaped_js_css.sub("", _str) _str = nh3.clean(_str) return f"{css_reinsert} {_str} {"\n".join(matching_template)}" + return None - def display_sanitized_md(obj) -> None: + def display_sanitized_md(obj: SyftObject) -> None: if hasattr(obj, "_repr_markdown_"): return nh3.clean(obj._repr_markdown_()) - ip.display_formatter.formatters['text/html'].for_type(SyftObject, display_sanitized_html) - ip.display_formatter.formatters['text/html'].for_type(DictTuple, display_sanitized_html) - ip.display_formatter.formatters['text/markdown'].for_type(SyftObject, display_sanitized_md) + ip.display_formatter.formatters["text/html"].for_type( + SyftObject, display_sanitized_html + ) + ip.display_formatter.formatters["text/html"].for_type( + DictTuple, display_sanitized_html + ) + ip.display_formatter.formatters["text/markdown"].for_type( + SyftObject, display_sanitized_md + ) def _patch_ipython_autocompletion() -> None: @@ -224,8 +239,6 @@ def patched_attr_matches(self, text: str) -> list[str]: # type: ignore print("Failed to patch syft autocompletion for __syft_dir__") -def patch_ipython(): +def patch_ipython() -> None: _patch_ipython_sanitization() _patch_ipython_autocompletion() - - diff --git a/packages/syft/src/syft/util/table.py b/packages/syft/src/syft/util/table.py index 8deecf0190b..f4965cb1ef0 100644 --- a/packages/syft/src/syft/util/table.py +++ b/packages/syft/src/syft/util/table.py @@ -90,9 +90,6 @@ def _create_table_rows( if "id" in ret_val: del ret_val["id"] for key in ret_val.keys(): - # if isinstance(ret_val[key], str): - # cols[key].append(nh3.clean(ret_val[key])) - # else: cols[key].append(ret_val[key]) else: for field in extra_fields: From 1ac431f8db36f41cec6994a79f7f07ba3b3bf2a1 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Thu, 20 Jun 2024 23:55:37 +0200 Subject: [PATCH 225/313] fix: imports in ipython_patches.py --- .../syft/src/syft/util/ipython_patches.py | 71 ------------------- 1 file changed, 71 deletions(-) diff --git 
a/packages/syft/src/syft/util/ipython_patches.py b/packages/syft/src/syft/util/ipython_patches.py index ab8e0a354d9..04bea086c5e 100644 --- a/packages/syft/src/syft/util/ipython_patches.py +++ b/packages/syft/src/syft/util/ipython_patches.py @@ -5,78 +5,7 @@ # relative from ..types.dicttuple import DictTuple -from .abstract_node import NodeSideType # noqa: F401 -from .abstract_node import NodeType # noqa: F401 -from .client.client import connect # noqa: F401 -from .client.client import login # noqa: F401 -from .client.client import login_as_guest # noqa: F401 -from .client.client import register # noqa: F401 -from .client.domain_client import DomainClient # noqa: F401 -from .client.gateway_client import GatewayClient # noqa: F401 -from .client.registry import DomainRegistry # noqa: F401 -from .client.registry import EnclaveRegistry # noqa: F401 -from .client.registry import NetworkRegistry # noqa: F401 -from .client.search import Search # noqa: F401 -from .client.search import SearchResults # noqa: F401 -from .client.user_settings import UserSettings # noqa: F401 -from .client.user_settings import settings # noqa: F401 -from .custom_worker.config import DockerWorkerConfig # noqa: F401 -from .custom_worker.config import PrebuiltWorkerConfig # noqa: F401 -from .node.credentials import SyftSigningKey # noqa: F401 -from .node.domain import Domain # noqa: F401 -from .node.enclave import Enclave # noqa: F401 -from .node.gateway import Gateway # noqa: F401 -from .node.server import serve_node # noqa: F401 -from .node.server import serve_node as bind_worker # noqa: F401 -from .node.worker import Worker # noqa: F401 -from .orchestra import Orchestra as orchestra # noqa: F401 -from .protocol.data_protocol import bump_protocol_version # noqa: F401 -from .protocol.data_protocol import check_or_stage_protocol # noqa: F401 -from .protocol.data_protocol import get_data_protocol # noqa: F401 -from .protocol.data_protocol import stage_protocol_changes # noqa: F401 -from .serde import NOTHING # noqa: F401 -from .serde.deserialize import _deserialize as deserialize # noqa: F401 -from .serde.serializable import serializable # noqa: F401 -from .serde.serialize import _serialize as serialize # noqa: F401 -from .service.action.action_data_empty import ActionDataEmpty # noqa: F401 -from .service.action.action_object import ActionObject # noqa: F401 -from .service.action.plan import Plan # noqa: F401 -from .service.action.plan import planify # noqa: F401 -from .service.api.api import api_endpoint # noqa: F401 -from .service.api.api import api_endpoint_method # noqa: F401 -from .service.api.api import create_new_api_endpoint as TwinAPIEndpoint # noqa: F401 -from .service.code.user_code import UserCodeStatus # noqa: F401; noqa: F401 -from .service.code.user_code import syft_function # noqa: F401; noqa: F401 -from .service.code.user_code import syft_function_single_use # noqa: F401; noqa: F401 -from .service.data_subject import DataSubjectCreate as DataSubject # noqa: F401 -from .service.dataset.dataset import Contributor # noqa: F401 -from .service.dataset.dataset import CreateAsset as Asset # noqa: F401 -from .service.dataset.dataset import CreateDataset as Dataset # noqa: F401 -from .service.notification.notifications import NotificationStatus # noqa: F401 -from .service.policy.policy import CustomInputPolicy # noqa: F401 -from .service.policy.policy import CustomOutputPolicy # noqa: F401 -from .service.policy.policy import ExactMatch # noqa: F401 -from .service.policy.policy import SingleExecutionExactOutput # 
noqa: F401 -from .service.policy.policy import UserInputPolicy # noqa: F401 -from .service.policy.policy import UserOutputPolicy # noqa: F401 -from .service.project.project import ProjectSubmit as Project # noqa: F401 -from .service.request.request import SubmitRequest as Request # noqa: F401 -from .service.response import SyftError # noqa: F401 -from .service.response import SyftNotReady # noqa: F401 -from .service.response import SyftSuccess # noqa: F401 -from .service.user.roles import Roles as roles # noqa: F401 -from .service.user.user_service import UserService # noqa: F401 from .types.syft_object import SyftObject -from .types.twin_object import TwinObject # noqa: F401 -from .types.uid import UID # noqa: F401 -from .util import filterwarnings # noqa: F401 -from .util import logger # noqa: F401 -from .util import options # noqa: F401 -from .util.autoreload import disable_autoreload # noqa: F401 -from .util.autoreload import enable_autoreload # noqa: F401 -from .util.telemetry import instrument # noqa: F401 -from .util.util import autocache # noqa: F401 -from .util.util import get_root_data_path # noqa: F401 def _patch_ipython_sanitization() -> None: From d48082933e9e35856789917f7a96b1194dfff968 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Thu, 20 Jun 2024 23:57:49 +0200 Subject: [PATCH 226/313] fix: ipython_patches.py -> patch_ipython.py --- packages/syft/src/syft/__init__.py | 2 +- .../syft/src/syft/util/{ipython_patches.py => patch_ipython.py} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename packages/syft/src/syft/util/{ipython_patches.py => patch_ipython.py} (100%) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index f4b28443e40..d1df56cf81a 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -77,7 +77,7 @@ from .util import options # noqa: F401 from .util.autoreload import disable_autoreload # noqa: F401 from .util.autoreload import enable_autoreload # noqa: F401 -from .util.ipython_patches import patch_ipython +from .util.patch_ipython import patch_ipython from .util.telemetry import instrument # noqa: F401 from .util.util import autocache # noqa: F401 from .util.util import get_root_data_path # noqa: F401 diff --git a/packages/syft/src/syft/util/ipython_patches.py b/packages/syft/src/syft/util/patch_ipython.py similarity index 100% rename from packages/syft/src/syft/util/ipython_patches.py rename to packages/syft/src/syft/util/patch_ipython.py From d0ad3b0b2b1aa3259334b5457e7ff44d8eb025ec Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 00:09:36 +0200 Subject: [PATCH 227/313] fix: import typo --- packages/syft/src/syft/util/patch_ipython.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 04bea086c5e..175c251febb 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -5,7 +5,7 @@ # relative from ..types.dicttuple import DictTuple -from .types.syft_object import SyftObject +from ..types.syft_object import SyftObject def _patch_ipython_sanitization() -> None: From ffe480be4dcf89ec18689994c9d57e2360973ed7 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 00:15:51 +0200 Subject: [PATCH 228/313] fix: support SyftResponseMessage --- packages/syft/src/syft/util/patch_ipython.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git 
a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 175c251febb..93962b6c5f8 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -4,6 +4,7 @@ from typing import Any # relative +from ..service.response import SyftResponseMessage from ..types.dicttuple import DictTuple from ..types.syft_object import SyftObject @@ -26,12 +27,12 @@ def _patch_ipython_sanitization() -> None: import nh3 # relative - from .util.assets import load_css - from .util.assets import load_js - from .util.notebook_ui.styles import CSS_CODE - from .util.notebook_ui.styles import FONT_CSS - from .util.notebook_ui.styles import ITABLES_CSS - from .util.notebook_ui.styles import JS_DOWNLOAD_FONTS + from .assets import load_css + from .assets import load_js + from .notebook_ui.styles import CSS_CODE + from .notebook_ui.styles import FONT_CSS + from .notebook_ui.styles import ITABLES_CSS + from .notebook_ui.styles import JS_DOWNLOAD_FONTS tabulator_js = load_js("tabulator.min.js") tabulator_js = tabulator_js.replace( @@ -77,10 +78,14 @@ def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: return f"{css_reinsert} {_str} {"\n".join(matching_template)}" return None - def display_sanitized_md(obj: SyftObject) -> None: + def display_sanitized_md(obj: SyftObject) -> str | None: if hasattr(obj, "_repr_markdown_"): return nh3.clean(obj._repr_markdown_()) + return None + ip.display_formatter.formatters["text/html"].for_type( + SyftResponseMessage, display_sanitized_html + ) ip.display_formatter.formatters["text/html"].for_type( SyftObject, display_sanitized_html ) From 8a62934598831098ea199413bed6a18f7ba2d162 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 00:24:21 +0200 Subject: [PATCH 229/313] fix: support response and exception --- packages/syft/src/syft/service/response.py | 5 +++-- packages/syft/src/syft/util/patch_ipython.py | 4 ---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/service/response.py b/packages/syft/src/syft/service/response.py index 723970cdfff..f924dfc48d7 100644 --- a/packages/syft/src/syft/service/response.py +++ b/packages/syft/src/syft/service/response.py @@ -4,6 +4,7 @@ from typing import Any # third party +import nh3 from result import Err # relative @@ -44,7 +45,7 @@ def _repr_html_(self) -> str: f'
' f"{type(self).__name__}: " f'
'
-            f"{self.message}

" + f"{nh3.clean(self.message)}

" ) @@ -107,7 +108,7 @@ def _repr_html_class_(self) -> str: def _repr_html_(self) -> str: return ( f'
' - + f"{type(self).__name__}: {self.args}

" + + f"{type(self).__name__}: {nh3.clean(self.args)}

" ) @staticmethod diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 93962b6c5f8..822e7df2e45 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -4,7 +4,6 @@ from typing import Any # relative -from ..service.response import SyftResponseMessage from ..types.dicttuple import DictTuple from ..types.syft_object import SyftObject @@ -83,9 +82,6 @@ def display_sanitized_md(obj: SyftObject) -> str | None: return nh3.clean(obj._repr_markdown_()) return None - ip.display_formatter.formatters["text/html"].for_type( - SyftResponseMessage, display_sanitized_html - ) ip.display_formatter.formatters["text/html"].for_type( SyftObject, display_sanitized_html ) From 7ca9121d16aa4a1829c93bc57d9ab8d71b243589 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 01:01:26 +0200 Subject: [PATCH 230/313] fix: more sanitization --- .../src/syft/util/notebook_ui/components/tabulator_template.py | 2 +- packages/syft/src/syft/util/patch_ipython.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py index b7b07277181..a4a136deb39 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py +++ b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py @@ -78,7 +78,7 @@ def format_dict(data: Any) -> str: if "clipboard" in data["type"]: return CopyButton(copy_text=data["value"]).to_html() - return str(data) + return nh3.clean(str(data)) def format_table_data(table_data: list[dict[str, Any]]) -> list[dict[str, str]]: diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 822e7df2e45..79ebdeabf71 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -78,7 +78,7 @@ def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: return None def display_sanitized_md(obj: SyftObject) -> str | None: - if hasattr(obj, "_repr_markdown_"): + if hasattr(obj, "_repr_markdown_") and callable(obj._repr_markdown_): return nh3.clean(obj._repr_markdown_()) return None From 0ff5389da44a88073d4e067ba14704f21db443c9 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 03:22:58 +0200 Subject: [PATCH 231/313] refactor: extract common logic for tabulator table rendering - Created `_render_tabulator_table` to handle shared rendering logic - Updated `build_tabulator_table` to use the new helper function - Created `build_tabulator_table_with_data` for cases where the data and metadata are passed and there's no need to create an extra SyftObject for table rows. 
--- .../components/tabulator_template.py | 107 +++++++++++------- 1 file changed, 68 insertions(+), 39 deletions(-) diff --git a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py index a4a136deb39..69c172181b7 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py +++ b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py @@ -95,6 +95,71 @@ def format_table_data(table_data: list[dict[str, Any]]) -> list[dict[str, str]]: return formatted +def _render_tabulator_table( + uid: str, + table_data: list[dict], + table_metadata: dict, + max_height: int | None, + pagination: bool, + header_sort: bool, +) -> str: + table_template = env.get_template("table.jinja2") + tabulator_js = load_js("tabulator.min.js") + tabulator_css = load_css("tabulator_pysyft.min.css") + js = load_js("table.js") + css = load_css("style.css") + + # Add tabulator as a named module for VSCode compatibility + tabulator_js = tabulator_js.replace( + "define(t)", "define('tabulator-tables', [], t)" + ) + + icon = table_metadata.get("icon", None) + if icon is None: + icon = Icon.TABLE.svg + + column_data, row_header = create_tabulator_columns( + table_metadata["columns"], header_sort=header_sort + ) + table_data = format_table_data(table_data) + table_html = table_template.render( + uid=uid, + columns=json.dumps(column_data), + row_header=json.dumps(row_header), + data=json.dumps(table_data), + css=css, + js=js, + index_field_name=TABLE_INDEX_KEY, + icon=icon, + name=table_metadata["name"], + tabulator_js=tabulator_js, + tabulator_css=tabulator_css, + max_height=json.dumps(max_height), + pagination=json.dumps(pagination), + header_sort=json.dumps(header_sort), + ) + + return table_html + + +def build_tabulator_table_with_data( + table_data: list[dict], + table_metadata: dict, + uid: str | None = None, + max_height: int | None = None, + pagination: bool = True, + header_sort: bool = True, +) -> str | None: + try: + uid = uid if uid is not None else secrets.token_hex(4) + return _render_tabulator_table( + uid, table_data, table_metadata, max_height, pagination, header_sort + ) + except Exception as e: + logger.debug("error building table", e) + return None + + def build_tabulator_table( obj: Any, uid: str | None = None, @@ -106,49 +171,13 @@ def build_tabulator_table( table_data, table_metadata = prepare_table_data(obj) if len(table_data) == 0: return obj.__repr__() - - table_template = env.get_template("table.jinja2") - tabulator_js = load_js("tabulator.min.js") - tabulator_css = load_css("tabulator_pysyft.min.css") - js = load_js("table.js") - css = load_css("style.css") - - # Add tabulator as a named module for VSCode compatibility - tabulator_js = tabulator_js.replace( - "define(t)", "define('tabulator-tables', [], t)" - ) - - icon = table_metadata.get("icon", None) - if icon is None: - icon = Icon.TABLE.svg - uid = uid if uid is not None else secrets.token_hex(4) - column_data, row_header = create_tabulator_columns( - table_metadata["columns"], header_sort=header_sort - ) - table_data = format_table_data(table_data) - table_html = table_template.render( - uid=uid, - columns=json.dumps(column_data), - row_header=json.dumps(row_header), - data=json.dumps(table_data), - css=css, - js=js, - index_field_name=TABLE_INDEX_KEY, - icon=icon, - name=table_metadata["name"], - tabulator_js=tabulator_js, - tabulator_css=tabulator_css, - max_height=json.dumps(max_height), - 
pagination=json.dumps(pagination), - header_sort=json.dumps(header_sort), + return _render_tabulator_table( + uid, table_data, table_metadata, max_height, pagination, header_sort ) - - return table_html except Exception as e: logger.debug("error building table", e) - - return None + return None def show_table(obj: Any) -> None: From 219aa915c622874dbc6d93d5f6e0c41247615022 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 03:38:22 +0200 Subject: [PATCH 232/313] feat: update code_history with new table style --- .../syft/service/code_history/code_history.py | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/service/code_history/code_history.py b/packages/syft/src/syft/service/code_history/code_history.py index b5e893c87bf..092505a2278 100644 --- a/packages/syft/src/syft/service/code_history/code_history.py +++ b/packages/syft/src/syft/service/code_history/code_history.py @@ -11,7 +11,9 @@ from ...types.syft_object import SyftObject from ...types.syft_object import SyftVerifyKey from ...types.uid import UID -from ...util.notebook_ui.components.table_template import create_table_template +from ...util.notebook_ui.components.tabulator_template import ( + build_tabulator_table_with_data, +) from ...util.table import prepare_table_data from ..code.user_code import UserCode from ..response import SyftError @@ -55,8 +57,8 @@ def _coll_repr_(self) -> dict[str, int]: return {"Number of versions": len(self.user_code_history)} def _repr_html_(self) -> str: - # TODO techdebt: move this to _coll_repr_ - rows, _ = prepare_table_data(self.user_code_history) + rows, metadata = prepare_table_data(self.user_code_history) + for i, r in enumerate(rows): r["Version"] = f"v{i}" raw_code = self.user_code_history[i].raw_code @@ -64,8 +66,11 @@ def _repr_html_(self) -> str: if n_code_lines > 5: raw_code = "\n".join(raw_code.split("\n", 5)) r["Code"] = raw_code - # rows = sorted(rows, key=lambda x: x["Version"]) - return create_table_template(rows, "CodeHistory", icon=None) + + metadata["name"] = "Code History" + metadata["columns"] += ["Version", "Code"] + + return build_tabulator_table_with_data(rows, metadata) def __getitem__(self, index: int | str) -> UserCode | SyftError: if isinstance(index, str): @@ -138,6 +143,12 @@ def __getitem__(self, key: str | int) -> CodeHistoriesDict | SyftError: def _repr_html_(self) -> str: rows = [ - {"user": user, "UserCodes": funcs} for user, funcs in self.user_dict.items() + {"User": user, "UserCodes": ", ".join(funcs)} + for user, funcs in self.user_dict.items() ] - return create_table_template(rows, "UserCodeHistory", icon=None) + metadata = { + "name": "UserCode Histories", + "columns": ["User", "UserCodes"], + "icon": None, + } + return build_tabulator_table_with_data(rows, metadata) From dcc859e00a43684b73885541b274d686e3580145 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 03:38:50 +0200 Subject: [PATCH 233/313] refactor: display formatters in patch_ipython.py --- packages/syft/src/syft/util/patch_ipython.py | 21 +++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 79ebdeabf71..904db6bf561 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -68,18 +68,21 @@ def _patch_ipython_sanitization() -> None: escaped_template = re.compile(table_template, re.DOTALL | re.VERBOSE) def 
display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: - if hasattr(obj, "_repr_html_") and callable(obj._repr_html_): # type: ignore - _str = obj._repr_html_() # type: ignore - matching_template = escaped_template.findall(_str) - _str = escaped_template.sub("", _str) - _str = escaped_js_css.sub("", _str) - _str = nh3.clean(_str) - return f"{css_reinsert} {_str} {"\n".join(matching_template)}" + if callable(getattr(obj, "_repr_html_", None)): + html_str = obj._repr_html_() + if html_str is not None: + matching_template = escaped_template.findall(html_str) + sanitized_str = escaped_template.sub("", html_str) + sanitized_str = escaped_js_css.sub("", sanitized_str) + sanitized_str = nh3.clean(sanitized_str) + return f"{css_reinsert} {sanitized_str} {'\n'.join(matching_template)}" return None def display_sanitized_md(obj: SyftObject) -> str | None: - if hasattr(obj, "_repr_markdown_") and callable(obj._repr_markdown_): - return nh3.clean(obj._repr_markdown_()) + if callable(getattr(obj, "_repr_markdown_", None)): + md = obj._repr_markdown_() + if md is not None: + return nh3.clean(md) return None ip.display_formatter.formatters["text/html"].for_type( From 700fbe93f5513876dd80ac257e1fd2a9535e459a Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 03:39:17 +0200 Subject: [PATCH 234/313] feat: remove old table code and old js --- .../notebook_ui/components/table_template.py | 330 ------------------ packages/syft/src/syft/util/table.py | 22 +- packages/syft/tests/syft/notebook_ui_test.py | 10 - 3 files changed, 1 insertion(+), 361 deletions(-) delete mode 100644 packages/syft/src/syft/util/notebook_ui/components/table_template.py diff --git a/packages/syft/src/syft/util/notebook_ui/components/table_template.py b/packages/syft/src/syft/util/notebook_ui/components/table_template.py deleted file mode 100644 index 7de891af6a9..00000000000 --- a/packages/syft/src/syft/util/notebook_ui/components/table_template.py +++ /dev/null @@ -1,330 +0,0 @@ -# stdlib -from collections.abc import Sequence -import json -from string import Template - -# relative -from ....types.uid import UID -from ..icons import Icon -from ..styles import CSS_CODE - -TABLE_INDEX_KEY = "_table_repr_index" - -custom_code = """ - - -
-[~300-line inline HTML/JS template for the legacy table widget, deleted by this
-patch: a header bar with ${icon} and ${list_name}, a search box, a paginated
-grid, a page-size selector, and an item counter starting at 0; the markup
-itself is elided]
-""" - - -def create_table_template( - table_data: Sequence, - name: str, - rows: int = 5, - icon: str | None = None, - grid_template_columns: str | None = None, - grid_template_cell_columns: str | None = None, - **kwargs: dict, -) -> str: - if icon is None: - icon = Icon.TABLE.svg - if grid_template_columns is None: - grid_template_columns = "1fr repeat({cols}, 1fr)" - if grid_template_cell_columns is None: - grid_template_cell_columns = "span 4" - - items_dict = json.dumps(table_data) - code = CSS_CODE + custom_code - template = Template(code) - rows = min(len(table_data), rows) - if len(table_data) == 0: - cols = 0 - else: - col_names = [k for k in table_data[0].keys() if k != TABLE_INDEX_KEY] - cols = (len(col_names)) * 4 - if "{cols}" in grid_template_columns: - grid_template_columns = grid_template_columns.format(cols=cols) - final_html = template.substitute( - uid=str(UID()), - element=items_dict, - list_name=name, - cols=cols, - rows=rows, - icon=icon, - searchIcon=Icon.SEARCH.svg, - clipboardIconEscaped=Icon.CLIPBOARD.js_escaped_svg, - grid_template_columns=grid_template_columns, - grid_template_cell_columns=grid_template_cell_columns, - ) - return final_html diff --git a/packages/syft/src/syft/util/table.py b/packages/syft/src/syft/util/table.py index f4965cb1ef0..5d380a65648 100644 --- a/packages/syft/src/syft/util/table.py +++ b/packages/syft/src/syft/util/table.py @@ -7,14 +7,12 @@ from typing import Any # third party -from loguru import logger import nh3 # relative -from .notebook_ui.components.table_template import TABLE_INDEX_KEY -from .notebook_ui.components.table_template import create_table_template from .util import full_name_with_qualname +TABLE_INDEX_KEY = "_table_repr_index" def _syft_in_mro(self: Any, item: Any) -> bool: if hasattr(type(item), "mro") and type(item) != type: @@ -239,21 +237,3 @@ def prepare_table_data( } return table_data, table_metadata - - -def list_dict_repr_html(self: Mapping | Set | Iterable) -> str | None: - try: - table_data, table_metadata = prepare_table_data(self) - if len(table_data) == 0: - # TODO cleanup tech debt: _repr_html_ is used in syft without `None` fallback. 
- return self.__repr__() - return create_table_template( - table_data=table_data, - **table_metadata, - ) - - except Exception as e: - logger.debug(f"Could not create table: {e}") - - # _repr_html_ returns None -> fallback to default repr - return None diff --git a/packages/syft/tests/syft/notebook_ui_test.py b/packages/syft/tests/syft/notebook_ui_test.py index eebc249dc82..32f591fedad 100644 --- a/packages/syft/tests/syft/notebook_ui_test.py +++ b/packages/syft/tests/syft/notebook_ui_test.py @@ -39,16 +39,6 @@ def table_test_cases() -> list[tuple[list, str | None]]: ] -@pytest.mark.parametrize("test_case", table_test_cases()) -def test_list_dict_repr_html(test_case): - obj, expected = test_case - - assert (obj._repr_html_() is not None) == expected - assert (dict(enumerate(obj))._repr_html_() is not None) == expected - assert (set(obj)._repr_html_() is not None) == expected - assert (tuple(obj)._repr_html_() is not None) == expected - - def test_sort_table_rows(): emails = [ "x@y.z", From 1331f732e84c07ca86901d245291f15d78ae67f6 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 03:45:30 +0200 Subject: [PATCH 235/313] lint --- packages/syft/src/syft/service/code_history/code_history.py | 4 ++-- packages/syft/src/syft/util/table.py | 1 + packages/syft/tests/syft/notebook_ui_test.py | 1 - 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/code_history/code_history.py b/packages/syft/src/syft/service/code_history/code_history.py index 092505a2278..c3b1151fe2e 100644 --- a/packages/syft/src/syft/service/code_history/code_history.py +++ b/packages/syft/src/syft/service/code_history/code_history.py @@ -56,7 +56,7 @@ class CodeHistoryView(SyftObject): def _coll_repr_(self) -> dict[str, int]: return {"Number of versions": len(self.user_code_history)} - def _repr_html_(self) -> str: + def _repr_html_(self) -> str | None: rows, metadata = prepare_table_data(self.user_code_history) for i, r in enumerate(rows): @@ -141,7 +141,7 @@ def __getitem__(self, key: str | int) -> CodeHistoriesDict | SyftError: ) return api.services.code_history.get_history_for_user(key) - def _repr_html_(self) -> str: + def _repr_html_(self) -> str | None: rows = [ {"User": user, "UserCodes": ", ".join(funcs)} for user, funcs in self.user_dict.items() diff --git a/packages/syft/src/syft/util/table.py b/packages/syft/src/syft/util/table.py index 5d380a65648..34439fa95df 100644 --- a/packages/syft/src/syft/util/table.py +++ b/packages/syft/src/syft/util/table.py @@ -14,6 +14,7 @@ TABLE_INDEX_KEY = "_table_repr_index" + def _syft_in_mro(self: Any, item: Any) -> bool: if hasattr(type(item), "mro") and type(item) != type: mro = type(item).mro() diff --git a/packages/syft/tests/syft/notebook_ui_test.py b/packages/syft/tests/syft/notebook_ui_test.py index 32f591fedad..ddff0824467 100644 --- a/packages/syft/tests/syft/notebook_ui_test.py +++ b/packages/syft/tests/syft/notebook_ui_test.py @@ -1,6 +1,5 @@ # third party import numpy as np -import pytest # syft absolute from syft.service.action.action_object import ActionObject From f9550f9a2eac64fe8e32e1889de0a71614fac49f Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 03:57:09 +0200 Subject: [PATCH 236/313] fix: f-string backslash --- packages/syft/src/syft/util/patch_ipython.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 904db6bf561..c1a91f4cb73 100644 --- 
a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -72,10 +72,11 @@ def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: html_str = obj._repr_html_() if html_str is not None: matching_template = escaped_template.findall(html_str) + matching_template = '\n'.join(matching_template) sanitized_str = escaped_template.sub("", html_str) sanitized_str = escaped_js_css.sub("", sanitized_str) sanitized_str = nh3.clean(sanitized_str) - return f"{css_reinsert} {sanitized_str} {'\n'.join(matching_template)}" + return f"{css_reinsert} {sanitized_str} {matching_template}" return None def display_sanitized_md(obj: SyftObject) -> str | None: From b57b056823dd26f84494e6a01611e14b3fda7134 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 03:59:57 +0200 Subject: [PATCH 237/313] tests: reintroduce list_dict_repr --- packages/syft/tests/syft/notebook_ui_test.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/packages/syft/tests/syft/notebook_ui_test.py b/packages/syft/tests/syft/notebook_ui_test.py index ddff0824467..eebc249dc82 100644 --- a/packages/syft/tests/syft/notebook_ui_test.py +++ b/packages/syft/tests/syft/notebook_ui_test.py @@ -1,5 +1,6 @@ # third party import numpy as np +import pytest # syft absolute from syft.service.action.action_object import ActionObject @@ -38,6 +39,16 @@ def table_test_cases() -> list[tuple[list, str | None]]: ] +@pytest.mark.parametrize("test_case", table_test_cases()) +def test_list_dict_repr_html(test_case): + obj, expected = test_case + + assert (obj._repr_html_() is not None) == expected + assert (dict(enumerate(obj))._repr_html_() is not None) == expected + assert (set(obj)._repr_html_() is not None) == expected + assert (tuple(obj)._repr_html_() is not None) == expected + + def test_sort_table_rows(): emails = [ "x@y.z", From 5a1863f3b428ba50d47bbd58cf9503835e5258fe Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 04:08:14 +0200 Subject: [PATCH 238/313] lint --- packages/syft/src/syft/util/patch_ipython.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index c1a91f4cb73..642b75a590d 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -72,11 +72,11 @@ def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: html_str = obj._repr_html_() if html_str is not None: matching_template = escaped_template.findall(html_str) - matching_template = '\n'.join(matching_template) + template = "\n".join(matching_template) sanitized_str = escaped_template.sub("", html_str) sanitized_str = escaped_js_css.sub("", sanitized_str) sanitized_str = nh3.clean(sanitized_str) - return f"{css_reinsert} {sanitized_str} {matching_template}" + return f"{css_reinsert} {sanitized_str} {template}" return None def display_sanitized_md(obj: SyftObject) -> str | None: From 6aee14f432199f9f8309f55894e4c27ae34626d9 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 21 Jun 2024 11:17:50 +0200 Subject: [PATCH 239/313] fix code_histories for a particular user --- .../code_history/code_history_service.py | 25 ++++++++++++++----- .../syft/tests/syft/users/user_code_test.py | 3 +++ 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/service/code_history/code_history_service.py b/packages/syft/src/syft/service/code_history/code_history_service.py index 
54da3d491b3..2c01b26fd1a 100644 --- a/packages/syft/src/syft/service/code_history/code_history_service.py +++ b/packages/syft/src/syft/service/code_history/code_history_service.py @@ -8,6 +8,7 @@ from ...util.telemetry import instrument from ..code.user_code import SubmitUserCode from ..code.user_code import UserCode +from ..code.user_code_service import UserCodeService from ..context import AuthedServiceContext from ..response import SyftError from ..response import SyftSuccess @@ -15,6 +16,7 @@ from ..service import service_method from ..user.user_roles import DATA_OWNER_ROLE_LEVEL from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL +from ..user.user_roles import ServiceRole from .code_history import CodeHistoriesDict from .code_history import CodeHistory from .code_history import CodeHistoryView @@ -115,14 +117,22 @@ def delete( def fetch_histories_for_user( self, context: AuthedServiceContext, user_verify_key: SyftVerifyKey ) -> CodeHistoriesDict | SyftError: - result = self.stash.get_by_verify_key( - credentials=context.credentials, user_verify_key=user_verify_key - ) + if context.role in [ServiceRole.DATA_OWNER, ServiceRole.ADMIN]: + result = self.stash.get_by_verify_key( + credentials=context.node.verify_key, user_verify_key=user_verify_key + ) + else: + result = self.stash.get_by_verify_key( + credentials=context.credentials, user_verify_key=user_verify_key + ) - user_code_service = context.node.get_service("usercodeservice") + user_code_service: UserCodeService = context.node.get_service("usercodeservice") def get_code(uid: UID) -> UserCode | SyftError: - return user_code_service.get_by_uid(context=context, uid=uid) + return user_code_service.stash.get_by_uid( + credentials=context.node.verify_key, + uid=uid, + ).ok() if result.is_ok(): code_histories = result.ok() @@ -181,7 +191,10 @@ def get_history_for_user( def get_histories_group_by_user( self, context: AuthedServiceContext ) -> UsersCodeHistoriesDict | SyftError: - result = self.stash.get_all(credentials=context.credentials) + if context.role in [ServiceRole.DATA_OWNER, ServiceRole.ADMIN]: + result = self.stash.get_all(context.credentials, has_permission=True) + else: + result = self.stash.get_all(context.credentials) if result.is_err(): return SyftError(message=result.err()) code_histories: list[CodeHistory] = result.ok() diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py index fd9e6b79cb5..a1182b1630a 100644 --- a/packages/syft/tests/syft/users/user_code_test.py +++ b/packages/syft/tests/syft/users/user_code_test.py @@ -113,8 +113,11 @@ def test_duplicated_user_code(worker) -> None: code_histories = worker.root_client.code_histories user_code_history = code_histories[ds_client.logged_in_user] + assert not isinstance(code_histories, SyftError) assert not isinstance(user_code_history, SyftError) assert user_code_history.code_versions, "No code version found." 
+ assert user_code_history.mock_syft_func.user_code_history[0].status is not None + assert user_code_history.mock_syft_func[0]._repr_markdown_(), "repr markdown failed" result = user_code_history.mock_syft_func_2[0]() assert result.get() == 1 From 4d303cb8622f03416af7fda6d76598605ff24ab5 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Fri, 21 Jun 2024 11:46:57 +0200 Subject: [PATCH 240/313] add tag to request --- notebooks/Bigquery full flow.ipynb | 613 +++++++++++++----- .../src/syft/protocol/protocol_version.json | 7 + .../syft/service/code/user_code_service.py | 7 +- .../syft/src/syft/service/request/request.py | 61 +- .../syft/service/request/request_service.py | 23 +- 5 files changed, 557 insertions(+), 154 deletions(-) diff --git a/notebooks/Bigquery full flow.ipynb b/notebooks/Bigquery full flow.ipynb index dfb2755060a..6337b4847e4 100644 --- a/notebooks/Bigquery full flow.ipynb +++ b/notebooks/Bigquery full flow.ipynb @@ -30,7 +30,11 @@ " # True if this is a new low-side request\n", " # TODO add condition for sql requests/usercodes\n", " low_request = batch.root.low_obj\n", - " return isinstance(low_request, Request) and batch.status == \"NEW\"\n", + " return (\n", + " isinstance(low_request, Request)\n", + " and batch.status == \"NEW\"\n", + " and \"autosync\" in low_request.tags\n", + " )\n", "\n", "\n", "def is_job_to_sync(batch: ObjectDiffBatch):\n", @@ -81,11 +85,8 @@ " if api_func is None:\n", " continue\n", "\n", - " # job = api_func(endpoint=client_high.api.services.reddit.query, blocking=False)\n", " job = api_func(blocking=False)\n", " jobs_by_request_id[request_id] = job\n", - " # sleep to prevent SQLite connection pool issues\n", - " time.sleep(1)\n", "\n", " return jobs_by_request_id\n", "\n", @@ -138,15 +139,7 @@ " print(\"Starting auto sync\")\n", " sync_and_execute_new_requests(client_low, client_high)\n", " sync_finished_jobs(client_low, client_high)\n", - " print(\"Finished auto sync\")\n", - "\n", - "\n", - "def auto_sync_loop(\n", - " client_low: DomainClient, client_high: DomainClient, sleep_seconds: int = 60\n", - ") -> None:\n", - " while True:\n", - " auto_sync(client_low, client_high)\n", - " time.sleep(sleep_seconds)" + " print(\"Finished auto sync\")\n" ] }, { @@ -179,21 +172,19 @@ "output_type": "stream", "text": [ "Staging Protocol Changes...\n", - "Document Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", - "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", + "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", + "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=1 image_uid=a7d247e176c84cfa8457ebeb803c2f19 in_memory=True\n", + "Setting up worker poolname=default-pool workers=1 image_uid=b5fa6320676a4ba78a4dc18fd1abd9ac in_memory=True\n", "Created default worker pool.\n", "Data Migrated to latest version !!!\n", "Staging Protocol Changes...\n", - "Document Store's SQLite DB path: 
/var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", - "Action Store's SQLite DB path: /var/folders/q1/ryq93kwj055dlbpngxv1c7z40000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", + "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", + "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=4 image_uid=a134a04a8f2c4a01ace797a792d3fe6b in_memory=True\n", + "Setting up worker poolname=default-pool workers=4 image_uid=f76e1c51f2b74d73b6da83e4ef07008a in_memory=True\n", "Created default worker pool.\n", - "Data Migrated to latest version !!!\n", - "Logged into as GUEST\n", - "Logged into as GUEST\n" + "Data Migrated to latest version !!!\n" ] } ], @@ -274,7 +265,9 @@ "source": [ "client_high = high_side.login(email=\"info@openmined.org\", password=\"changethis\")\n", "client_low = low_side.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "client_low.register(email=\"newuser@openmined.org\", name=\"John Doe\", password=\"pw\", password_verify=\"pw\")\n", + "client_low.register(\n", + " email=\"newuser@openmined.org\", name=\"John Doe\", password=\"pw\", password_verify=\"pw\"\n", + ")\n", "client_low_ds = low_side.login(email=\"newuser@openmined.org\", password=\"pw\")" ] }, @@ -296,19 +289,20 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 5, "id": "3eb31229-93e8-46fe-ba0c-61d94d5d1910", "metadata": {}, "outputs": [], "source": [ "import json\n", + "\n", "with open(\"./credentials.json\", \"r\") as f:\n", " BQ_CREDENTIALS = json.loads(f.read())" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 6, "id": "d1758200-c979-48df-b962-9f8f3c5737c0", "metadata": {}, "outputs": [ @@ -321,7 +315,7 @@ "SyftSuccess: Endpoint successfully created." 
] }, - "execution_count": 8, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } @@ -329,9 +323,8 @@ "source": [ "# Mock API\n", "\n", - "@sy.api_endpoint_method(\n", - " settings={}\n", - ")\n", + "\n", + "@sy.api_endpoint_method(settings={})\n", "def mock_query_function(\n", " context,\n", " sql_query: str,\n", @@ -366,9 +359,7 @@ "\n", "\n", "# Private API\n", - "@sy.api_endpoint_method(\n", - " settings=BQ_CREDENTIALS\n", - ")\n", + "@sy.api_endpoint_method(settings=BQ_CREDENTIALS)\n", "def private_query_function(\n", " context,\n", " sql_query: str,\n", @@ -381,12 +372,16 @@ " from syft.service.response import SyftError\n", "\n", " # Client query\n", - " credentials = service_account.Credentials.from_service_account_info(context.settings)\n", - " scoped_credentials = credentials.with_scopes(['https://www.googleapis.com/auth/cloud-platform'])\n", + " credentials = service_account.Credentials.from_service_account_info(\n", + " context.settings\n", + " )\n", + " scoped_credentials = credentials.with_scopes(\n", + " [\"https://www.googleapis.com/auth/cloud-platform\"]\n", + " )\n", "\n", " client = bigquery.Client(\n", " credentials=scoped_credentials,\n", - " location=\"us-west1\", \n", + " location=\"us-west1\",\n", " )\n", " # Generate mock data\n", " rows = client.query_and_wait(\n", @@ -394,7 +389,9 @@ " project=\"reddit-testing-415005\",\n", " )\n", " if rows.total_rows > 40000:\n", - " return SyftError(message=\"Please only write queries that gather aggregate statistics\")\n", + " return SyftError(\n", + " message=\"Please only write queries that gather aggregate statistics\"\n", + " )\n", " # Create DataFrame\n", " res = rows.to_dataframe()\n", " return res\n", @@ -413,13 +410,15 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 7, "id": "f67f6643-727e-4c92-922a-04e58c7ac42c", "metadata": {}, "outputs": [], "source": [ "if False:\n", - " client_high.api.services.reddit.query.private(sql_query=\"SELECT * from data_10gb.comments LIMIT 40\").head()" + " client_high.api.services.reddit.query.private(\n", + " sql_query=\"SELECT * from data_10gb.comments LIMIT 40\"\n", + " ).head()" ] }, { @@ -432,7 +431,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 8, "id": "d076c2b5-f58c-4385-8cde-8b575631c3bc", "metadata": {}, "outputs": [ @@ -451,7 +450,7 @@ { "data": { "text/html": [ - "" + "" ], "text/plain": [ "" @@ -476,7 +475,7 @@ "SyftSuccess: Synced 1 items" ] }, - "execution_count": 10, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -496,36 +495,49 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 9, "id": "2e3f599f-86ed-4cb8-ae7a-170f5fd59ef8", "metadata": {}, "outputs": [], "source": [ "@sy.api_endpoint(path=\"reddit.submit_query\")\n", "def submit_query(\n", - " context, func_name: str, query: str,\n", + " context,\n", + " func_name: str,\n", + " query: str,\n", ") -> str:\n", " import syft as sy\n", "\n", " if not func_name.isalpha():\n", - " return sy.SyftError(message=\"Please only use alphabetic characters for your func_name\")\n", + " return sy.SyftError(\n", + " message=\"Please only use alphabetic characters for your func_name\"\n", + " )\n", "\n", - " @sy.syft_function(name=func_name,\n", - " input_policy=sy.MixedInputPolicy(endpoint=sy.Constant(val=context.admin_client.api.services.reddit.query),\n", - " query=sy.Constant(val=query),\n", - " client=context.admin_client))\n", + " @sy.syft_function(\n", + " name=func_name,\n", + " 
input_policy=sy.MixedInputPolicy(\n", + " endpoint=sy.Constant(val=context.admin_client.api.services.reddit.query),\n", + " query=sy.Constant(val=query),\n", + " client=context.admin_client,\n", + " ),\n", + " )\n", " def execute_query(query: str, endpoint):\n", - " res = endpoint.private(sql_query=query);\n", + " res = endpoint.private(sql_query=query)\n", " return res\n", - " \n", - " res = context.user_client.code.request_code_execution(execute_query)\n", "\n", - " return f\"Query submitted {res}, use `client.code.{func_name}()` to run your query\"" + " request = context.user_client.code.request_code_execution(execute_query)\n", + " if isinstance(request, sy.SyftError):\n", + " return request\n", + " context.admin_client.requests.set_tags(request, [\"autosync\"])\n", + "\n", + " return (\n", + " f\"Query submitted {request}, use `client.code.{func_name}()` to run your query\"\n", + " )" ] }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 10, "id": "02c6287f-322e-469f-a2ae-666fc17c6dac", "metadata": {}, "outputs": [ @@ -538,7 +550,7 @@ "SyftSuccess: Endpoint successfully created." ] }, - "execution_count": 12, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -557,10 +569,18 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "id": "027d4479-0f47-4647-9c00-7b7d87f6a80f", "metadata": {}, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n", + "Logged into as GUEST\n" + ] + }, { "data": { "text/html": [ @@ -575,23 +595,35 @@ } ], "source": [ - "submit_res = client_low_ds.api.services.reddit.submit_query(func_name=\"myquery\",\n", - " query=\"SELECT * from data_10gb.comments LIMIT 40\")" + "submit_res = client_low_ds.api.services.reddit.submit_query(\n", + " func_name=\"myquery\", query=\"SELECT * from data_10gb.comments LIMIT 40\"\n", + ")" ] }, { "cell_type": "code", - "execution_count": null, - "id": "c19b0ff1-4b28-4e3c-99ca-3778e5ba52f6", + "execution_count": 12, + "id": "dd60a815-e435-450e-9528-ac2bfbc9ee62", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "['autosync']" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "submit_res" + "client_low.requests[0].tags" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "id": "a7573008-bb4f-43b6-84da-bf797ec9dac6", "metadata": {}, "outputs": [], @@ -609,10 +641,182 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "id": "e518ba33-9d2a-40a2-a6b8-174bc8392c77", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synced 0 new requests\n", + "Started 0 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Decision: Syncing 5 objects\n", + "Sharing 1 new results\n", + "Finished auto sync\n", + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synced 0 new requests\n", + "Started 0 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sharing 0 new results\n", + "Finished auto sync\n", + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synced 0 new requests\n", + "Started 0 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sharing 0 new results\n", + "Finished auto sync\n", + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Synced 0 new requests\n", + "Started 0 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + ], + "text/plain": [ + "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sharing 0 new results\n", + "Finished auto sync\n" + ] + } + ], "source": [ "# do forever\n", "for i in range(4):\n", @@ -629,98 +833,212 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "id": "f92240e6-0c35-4cd3-b52d-a5fa3256e3f9", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "
SyftWarning:
Loading results from cache.

" + ], + "text/plain": [ + "SyftWarning: Loading results from cache." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], "source": [ "res = client_low_ds.code.myquery()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "id": "23050872-2f0b-4d23-9c1f-57ad74ac09d5", "metadata": {}, - "outputs": [], - "source": [ - "df = res.get()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bb13bcf7-1e1b-4575-84ca-570f13a570c2", - "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
idpost_idparent_idcreated_atlast_modified_atbodyauthor_idgildedscoreupvote_ratiodeletedcollapsed_in_crowd_controlspamsubreddit_idpermalink
0t1_jsrssaat3_3eq9p3rt1_j0bm0qn2020-02-05 13:15:44+00:00NaTWASHINGTON (AP) — The federal government groun...t2_31y14bfhFalse30.65FalseFalseFalset5_7i2tp/r/t5_7i2tp/comments/eq9p3r/comment/jsrssaa
1t1_z014wynt3_mtoy3viNone2020-02-05 13:15:44+00:00NaTHe was indicted on 16 felony charges, includin...t2_iemo2ikgFalse81.00FalseFalseFalset5_xg19m/r/t5_xg19m/comments/mtoy3vi/comment/z014wyn
2t1_8ttp66lt3_is0dk32None2020-02-05 13:15:44+00:00NaTNonet2_csenfqwlFalse61.00FalseFalseFalset5_unjsw/r/t5_unjsw/comments/is0dk32/comment/8ttp66l
3t1_qhuklsmt3_7ajgpjeNone2020-02-05 13:15:44+00:00NaTThese nachos are so sinful; it's hard to stop ...t2_2ztp96r7False70.69FalseFalseFalset5_91cqb/r/t5_91cqb/comments/7ajgpje/comment/qhuklsm
4t1_8nkh2zbt3_oygwavxt1_0mzt6bq2020-02-05 13:15:44+00:00NaTWhen we last checked in with Charles Platkin, ...t2_o79jr0e0False51.00TrueFalseFalset5_y71mw/r/t5_y71mw/comments/oygwavx/comment/8nkh2zb
\n", + "
" + ], + "text/plain": [ + " id post_id parent_id created_at \\\n", + "0 t1_jsrssaa t3_3eq9p3r t1_j0bm0qn 2020-02-05 13:15:44+00:00 \n", + "1 t1_z014wyn t3_mtoy3vi None 2020-02-05 13:15:44+00:00 \n", + "2 t1_8ttp66l t3_is0dk32 None 2020-02-05 13:15:44+00:00 \n", + "3 t1_qhuklsm t3_7ajgpje None 2020-02-05 13:15:44+00:00 \n", + "4 t1_8nkh2zb t3_oygwavx t1_0mzt6bq 2020-02-05 13:15:44+00:00 \n", + "\n", + " last_modified_at body \\\n", + "0 NaT WASHINGTON (AP) — The federal government groun... \n", + "1 NaT He was indicted on 16 felony charges, includin... \n", + "2 NaT None \n", + "3 NaT These nachos are so sinful; it's hard to stop ... \n", + "4 NaT When we last checked in with Charles Platkin, ... \n", + "\n", + " author_id gilded score upvote_ratio deleted \\\n", + "0 t2_31y14bfh False 3 0.65 False \n", + "1 t2_iemo2ikg False 8 1.00 False \n", + "2 t2_csenfqwl False 6 1.00 False \n", + "3 t2_2ztp96r7 False 7 0.69 False \n", + "4 t2_o79jr0e0 False 5 1.00 True \n", + "\n", + " collapsed_in_crowd_control spam subreddit_id \\\n", + "0 False False t5_7i2tp \n", + "1 False False t5_xg19m \n", + "2 False False t5_unjsw \n", + "3 False False t5_91cqb \n", + "4 False False t5_y71mw \n", + "\n", + " permalink \n", + "0 /r/t5_7i2tp/comments/eq9p3r/comment/jsrssaa \n", + "1 /r/t5_xg19m/comments/mtoy3vi/comment/z014wyn \n", + "2 /r/t5_unjsw/comments/is0dk32/comment/8ttp66l \n", + "3 /r/t5_91cqb/comments/7ajgpje/comment/qhuklsm \n", + "4 /r/t5_y71mw/comments/oygwavx/comment/8nkh2zb " + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "df.head()" + "res.get().head()" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "024d9969-3345-41a6-b904-9a04ab4bfdad", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2d851f1e-75af-4d0f-8523-f30e2d0d9951", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14065b63-0102-4519-9749-dbf05c91c22b", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "77ff0fc4-243a-4d2e-acb4-cd29570855d1", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dd79cf84-9ac2-41ce-ae10-862754917be1", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12604e72-5229-4dfc-8b55-802ef335d356", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "85ce2d7c-cd95-497b-b39d-6e72f3d808a2", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9dc3f93f-1126-43b6-86be-a7b93b5a4af4", - "metadata": {}, - "outputs": [], - "source": [] - }, { "cell_type": "code", "execution_count": null, @@ -747,8 +1065,7 @@ "# @sy.syft_function(input_policy=sy.MixedInputPolicy(endpoint=client_high.api.services.reddit.query, client=client_high))\n", "# def execute_query(endpoint):\n", "# res = endpoint(sql_query=\"SELECT * from data_10gb.comments LIMIT 40\");\n", - "# return res\n", - "\n" + "# return res" ] }, { @@ -791,7 +1108,7 @@ "# @sy.syft_function(input_policy=sy.ExactMatch(endpoint=client_high.api.services.reddit.query))\n", "# def execute_query(endpoint):\n", "# res = endpoint(sql_query=\"SELECT * from data_10gb.comments LIMIT 40\");\n", - "# return res\n" + "# return res" ] }, { @@ -842,7 
+1159,7 @@ "\n", "# @sy.syft_function(name=func_name, input_policy=sy.MixedInputPolicy(query=sy.Constant(val=query),client=context.admin_client)\n", "# def execute_query(query: str):\n", - " \n", + "\n", "# return f\"your query {query} was EXECUTED\"\n", "\n", "# res = context.user_client.code.request_code_execution(execute_query)\n", @@ -922,7 +1239,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.10.13" } }, "nbformat": 4, diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 9687b1ac4ce..b225c70c9b7 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -340,6 +340,13 @@ "hash": "0e84e4c91e378717e1a4703574b07e3b1e6a3e5707401b4e0cc8d30088a506b9", "action": "add" } + }, + "Request": { + "3": { + "version": 3, + "hash": "ba9ebb04cc3e8b3ae3302fd42a67e47261a0a330bae5f189d8f4819cf2804711", + "action": "add" + } } } } diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 9287e49fec4..a18ff55284f 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -258,7 +258,7 @@ def request_code_execution( context: AuthedServiceContext, code: SubmitUserCode, reason: str | None = "", - ) -> SyftSuccess | SyftError: + ) -> Request | SyftError: """Request Code execution on user code""" return self._request_code_execution(context=context, code=code, reason=reason) @@ -378,8 +378,9 @@ def is_execution_allowed( context: AuthedServiceContext, output_policy: OutputPolicy | None, ) -> bool | SyftSuccess | SyftError | SyftNotReady: - if not code.get_status(context).approved: - return code.status.get_status_message() + status = code.get_status(context) + if not status.approved: + return status.get_status_message() # Check if the user has permission to execute the code. 
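         # `has_code_permission` is captured with the walrus operator so that,
         # when the permission check fails, the failing response itself is
         # returned to the caller unchanged.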
        elif not (has_code_permission := self.has_code_permission(code, context)):
            return has_code_permission
diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py
index 882dd243ec4..78630b561b1 100644
--- a/packages/syft/src/syft/service/request/request.py
+++ b/packages/syft/src/syft/service/request/request.py
@@ -10,6 +10,7 @@
 from result import Err
 from result import Ok
 from result import Result
+from syft.types.syft_migration import migrate
 from typing_extensions import Self
 
 # relative
@@ -28,7 +29,7 @@
 from ...types.syft_object import SYFT_OBJECT_VERSION_3
 from ...types.syft_object import SyftObject
 from ...types.syncable_object import SyncableSyftObject
-from ...types.transforms import TransformContext
+from ...types.transforms import TransformContext, drop, make_set_default
 from ...types.transforms import add_node_uid_for_key
 from ...types.transforms import generate_id
 from ...types.transforms import transform
@@ -387,7 +388,7 @@ class CreateCustomWorkerPoolChangeV2(Change):
 
 
 @serializable()
-class Request(SyncableSyftObject):
+class RequestV2(SyncableSyftObject):
     __canonical_name__ = "Request"
     __version__ = SYFT_OBJECT_VERSION_2
 
@@ -402,6 +403,48 @@ class Request(SyncableSyftObject):
     request_hash: str
     changes: list[Change]
     history: list[ChangeStatus] = []
+    __table_coll_widths__ = [
+        "min-content",
+        "auto",
+        "auto",
+        "auto",
+        "auto",
+        "auto",
+    ]
+
+    __attr_searchable__ = [
+        "requesting_user_verify_key",
+        "approving_user_verify_key",
+    ]
+    __attr_unique__ = ["request_hash"]
+    __repr_attrs__ = [
+        "request_time",
+        "updated_at",
+        "status",
+        "changes",
+        "requesting_user_verify_key",
+    ]
+    __exclude_sync_diff_attrs__ = ["node_uid", "changes", "history"]
+    __table_sort_attr__ = "Request time"
+
+
+@serializable()
+class Request(SyncableSyftObject):
+    __canonical_name__ = "Request"
+    __version__ = SYFT_OBJECT_VERSION_3
+
+    requesting_user_verify_key: SyftVerifyKey
+    requesting_user_name: str = ""
+    requesting_user_email: str | None = ""
+    requesting_user_institution: str | None = ""
+    approving_user_verify_key: SyftVerifyKey | None = None
+    request_time: DateTime
+    updated_at: DateTime | None = None
+    node_uid: UID
+    request_hash: str
+    changes: list[Change]
+    history: list[ChangeStatus] = []
+    tags: list[str] = []
 
     __table_coll_widths__ = [
         "min-content",
@@ -1364,3 +1407,17 @@ def _run(
 
     def link(self) -> Any:  # type: ignore
         return self.code.status
+
+
+@migrate(RequestV2, Request)
+def migrate_request_v2_to_v3() -> list[Callable]:
+    return [
+        make_set_default("tags", []),
+    ]
+
+
+@migrate(Request, RequestV2)
+def migrate_request_v3_to_v2() -> list[Callable]:
+    return [
+        drop("tags"),
+    ]
diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py
index 1bd76c4cc24..7edadc9a888 100644
--- a/packages/syft/src/syft/service/request/request_service.py
+++ b/packages/syft/src/syft/service/request/request_service.py
@@ -24,7 +24,7 @@
 from ..service import TYPE_TO_SERVICE
 from ..service import service_method
 from ..user.user import UserView
-from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL
+from ..user.user_roles import ADMIN_ROLE_LEVEL, DATA_SCIENTIST_ROLE_LEVEL
 from ..user.user_roles import GUEST_ROLE_LEVEL
 from ..user.user_service import UserService
 from .request import Change
@@ -312,6 +312,27 @@ def delete_by_uid(
             return SyftError(message=str(result.err()))
         return SyftSuccess(message=f"Request with id {uid} deleted.")
 
+    @service_method(
path="request.set_tags", + name="set_tags", + roles=ADMIN_ROLE_LEVEL, + ) + def set_tags( + self, + context: AuthedServiceContext, + request: Request, + tags: list[str], + ) -> Request | SyftError: + request = self.stash.get_by_uid(context.credentials, request.id) + if request.is_err(): + return SyftError(message=str(request.err())) + if request.ok() is None: + return SyftError(message="Request does not exist.") + request = request.ok() + + request.tags = tags + return self.save(context, request) + TYPE_TO_SERVICE[Request] = RequestService SERVICE_TO_TYPES[RequestService].update({Request}) From 5b8b5d3a09d81d77c036f2de9ec658a1e252723d Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 21 Jun 2024 11:59:51 +0200 Subject: [PATCH 241/313] reproduce bug https://github.com/OpenMined/Heartbeat/issues/1503 --- tests/integration/local/twin_api_sync_test.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/integration/local/twin_api_sync_test.py b/tests/integration/local/twin_api_sync_test.py index 9212b7d6905..c6b1a152789 100644 --- a/tests/integration/local/twin_api_sync_test.py +++ b/tests/integration/local/twin_api_sync_test.py @@ -121,6 +121,12 @@ def compute(query): private_res, SyftError ), "Should not be able to access private function on low side." + # updating twin api endpoint works + high_client.custom_api.update(endpoint_path="testapi.query", endpoint_timeout=60) + widget = sy.sync(from_client=high_client, to_client=low_client) + result = widget[0].click_sync() + assert result, result + @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") @pytest.mark.local_node From 696e73b3933561ec5e23c4284ae9acd7b65d3390 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 12:06:23 +0200 Subject: [PATCH 242/313] bump --- packages/syft/src/syft/assets/css/style.css | 18 ++++++++++++++++++ packages/syft/src/syft/service/response.py | 4 ++-- .../syft/util/notebook_ui/components/sync.py | 1 + .../components/tabulator_template.py | 6 +++--- packages/syft/src/syft/util/patch_ipython.py | 8 ++++++-- packages/syft/src/syft/util/table.py | 2 +- 6 files changed, 31 insertions(+), 8 deletions(-) diff --git a/packages/syft/src/syft/assets/css/style.css b/packages/syft/src/syft/assets/css/style.css index 528046d6668..113a9908055 100644 --- a/packages/syft/src/syft/assets/css/style.css +++ b/packages/syft/src/syft/assets/css/style.css @@ -5,6 +5,7 @@ body.vscode-dark { --tertiary-color: #cfcdd6; --button-color: #111111; --colors-black: #ffffff; + --surface-color: #fff; } body { @@ -13,6 +14,7 @@ body { --tertiary-color: #000000de; --button-color: #d1d5db; --colors-black: #17161d; + --surface-color: #464158; } .header-1 { @@ -564,3 +566,19 @@ body { .syft-widget li a:hover { background-color: #c2def0; } + +.syft-user_code, .syft-project, .syft-project-create, .syft-dataset, .syft-syncstate { + color: var(--surface-color); +} + +.syft-dataset h3, +.syft-dataset p, +.syft-syncstate h3, +.syft-syncstate p { + font-family: 'Open Sans'; +} + +.diff-container { + border: 0.5px solid #B4B0BF; +} + diff --git a/packages/syft/src/syft/service/response.py b/packages/syft/src/syft/service/response.py index f924dfc48d7..12070a42b54 100644 --- a/packages/syft/src/syft/service/response.py +++ b/packages/syft/src/syft/service/response.py @@ -45,7 +45,7 @@ def _repr_html_(self) -> str: f'
' f"{type(self).__name__}: " f'
'
-            f"{nh3.clean(self.message)}

" + f"{nh3.clean(self.message, clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}})}
" ) @@ -108,7 +108,7 @@ def _repr_html_class_(self) -> str: def _repr_html_(self) -> str: return ( f'
' - + f"{type(self).__name__}: {nh3.clean(self.args)}

" + + f"{type(self).__name__}: {nh3.clean(self.args, clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}})}
" ) @staticmethod diff --git a/packages/syft/src/syft/util/notebook_ui/components/sync.py b/packages/syft/src/syft/util/notebook_ui/components/sync.py index 4fdd0adf1b3..9e8877ed70c 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/sync.py +++ b/packages/syft/src/syft/util/notebook_ui/components/sync.py @@ -3,6 +3,7 @@ from typing import Any # third party +import nh3 from pydantic import model_validator # relative diff --git a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py index 69c172181b7..9199aa0acbc 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py +++ b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py @@ -69,7 +69,7 @@ def format_dict(data: Any) -> str: return data if set(data.keys()) != {"type", "value"}: - return nh3.clean(str(data)) + return nh3.clean(str(data), clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) if "badge" in data["type"]: return Badge(value=data["value"], badge_class=data["type"]).to_html() @@ -78,7 +78,7 @@ def format_dict(data: Any) -> str: if "clipboard" in data["type"]: return CopyButton(copy_text=data["value"]).to_html() - return nh3.clean(str(data)) + return nh3.clean(str(data), clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) def format_table_data(table_data: list[dict[str, Any]]) -> list[dict[str, str]]: @@ -87,7 +87,7 @@ def format_table_data(table_data: list[dict[str, Any]]) -> list[dict[str, str]]: row_formatted: dict[str, str] = {} for k, v in row.items(): if isinstance(v, str): - row_formatted[k] = nh3.clean(v.replace("\n", "
")) + row_formatted[k] = nh3.clean(v.replace("\n", "
"), clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) continue v_formatted = format_dict(v) row_formatted[k] = v_formatted diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 642b75a590d..79d050ba955 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -32,6 +32,8 @@ def _patch_ipython_sanitization() -> None: from .notebook_ui.styles import FONT_CSS from .notebook_ui.styles import ITABLES_CSS from .notebook_ui.styles import JS_DOWNLOAD_FONTS + from .notebook_ui.components.sync import ALERT_CSS + from .notebook_ui.components.sync import COPY_CSS tabulator_js = load_js("tabulator.min.js") tabulator_js = tabulator_js.replace( @@ -53,6 +55,8 @@ def _patch_ipython_sanitization() -> None: {JS_DOWNLOAD_FONTS} {CSS_CODE} + + """ escaped_js_css = re.compile( @@ -75,7 +79,7 @@ def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: template = "\n".join(matching_template) sanitized_str = escaped_template.sub("", html_str) sanitized_str = escaped_js_css.sub("", sanitized_str) - sanitized_str = nh3.clean(sanitized_str) + sanitized_str = nh3.clean(sanitized_str, clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) return f"{css_reinsert} {sanitized_str} {template}" return None @@ -83,7 +87,7 @@ def display_sanitized_md(obj: SyftObject) -> str | None: if callable(getattr(obj, "_repr_markdown_", None)): md = obj._repr_markdown_() if md is not None: - return nh3.clean(md) + return nh3.clean(md, clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) return None ip.display_formatter.formatters["text/html"].for_type( diff --git a/packages/syft/src/syft/util/table.py b/packages/syft/src/syft/util/table.py index 34439fa95df..60510161a5c 100644 --- a/packages/syft/src/syft/util/table.py +++ b/packages/syft/src/syft/util/table.py @@ -134,7 +134,7 @@ def _create_table_rows( except Exception as e: print(e) value = None - cols[field].append(nh3.clean(str(value))) + cols[field].append(nh3.clean(str(value), clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}})) col_lengths = {len(cols[col]) for col in cols.keys()} if len(col_lengths) != 1: From bf3b94de3b13ff95a0c02206af1a3bcff3345953 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 21 Jun 2024 12:10:06 +0200 Subject: [PATCH 243/313] fix err type Mypy should have caught this error, but it didn't and still doesn't. I couldn't figure out why, so I'm just fixing it manually. 
--- packages/syft/src/syft/service/sync/sync_service.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/sync/sync_service.py b/packages/syft/src/syft/service/sync/sync_service.py index db50c2a7a61..8844a3d05d3 100644 --- a/packages/syft/src/syft/service/sync/sync_service.py +++ b/packages/syft/src/syft/service/sync/sync_service.py @@ -24,6 +24,7 @@ from ..action.action_permissions import ActionPermission from ..action.action_permissions import StoragePermission from ..api.api import TwinAPIEndpoint +from ..api.api_service import APIService from ..code.user_code import UserCodeStatusCollection from ..context import AuthedServiceContext from ..job.job_stash import Job @@ -156,11 +157,11 @@ def set_object( if isinstance(item, TwinAPIEndpoint): # we need the side effect of set function # to create an action object - res = context.node.get_service("apiservice").set( - context=context, endpoint=item - ) + apiservice: APIService = context.node.get_service("apiservice") # type: ignore + + res = apiservice.set(context=context, endpoint=item) if isinstance(res, SyftError): - return res + return Err(res.message) else: return Ok(item) From 620e5800cb42dfbaf73c96453a6c70ff2b5c2d6e Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Fri, 21 Jun 2024 12:23:22 +0200 Subject: [PATCH 244/313] lazy load actionobjects --- packages/syft/src/syft/client/domain_client.py | 3 ++- .../syft/src/syft/service/action/action_service.py | 11 +++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/packages/syft/src/syft/client/domain_client.py b/packages/syft/src/syft/client/domain_client.py index 699ab0a0682..fb83d33e1fa 100644 --- a/packages/syft/src/syft/client/domain_client.py +++ b/packages/syft/src/syft/client/domain_client.py @@ -173,7 +173,6 @@ def get_sync_state(self) -> SyncState | SyftError: for uid, obj in state.objects.items(): if isinstance(obj, ActionObject): obj = obj.refresh_object(resolve_nested=False) - obj.reload_cache() state.objects[uid] = obj return state @@ -185,8 +184,10 @@ def apply_state(self, resolved_state: ResolvedSyncState) -> SyftSuccess | SyftEr action_objects = [x for x in items if isinstance(x, ActionObject)] for action_object in action_objects: + action_object.reload_cache() # NOTE permissions are added separately server side action_object._send(self.id, self.verify_key, add_storage_permission=False) + action_object = action_object.as_empty() ignored_batches = resolved_state.ignored_batches diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 58e7b78f9bc..931194c22ec 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -341,7 +341,9 @@ def _user_code_execute( filtered_kwargs = result.ok() if hasattr(input_policy, "transform_kwargs"): - filtered_kwargs_res = input_policy.transform_kwargs(context, filtered_kwargs) + filtered_kwargs_res = input_policy.transform_kwargs( + context, filtered_kwargs + ) if filtered_kwargs_res.is_err(): return filtered_kwargs_res else: @@ -1009,11 +1011,12 @@ def filter_twin_kwargs( else: if isinstance(v, ActionObject): filtered[k] = v.syft_action_data - elif isinstance(v, str | int | float | dict | CustomEndpointActionObject) and allow_python_types: + elif ( + isinstance(v, str | int | float | dict | CustomEndpointActionObject) + and allow_python_types + ): filtered[k] = v else: - import ipdb - ipdb.set_trace() raise ValueError( 
f"unexepected value {v} passed to filtered twin kwargs" ) From 6c4cd8ccd91752656bb4ada7a866241c2cc32291 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Fri, 21 Jun 2024 12:54:41 +0200 Subject: [PATCH 245/313] fix ao lazy load for sync --- packages/syft/src/syft/client/domain_client.py | 2 +- packages/syft/src/syft/service/sync/diff_state.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/client/domain_client.py b/packages/syft/src/syft/client/domain_client.py index debeaaef043..ca9ee53ce22 100644 --- a/packages/syft/src/syft/client/domain_client.py +++ b/packages/syft/src/syft/client/domain_client.py @@ -192,7 +192,7 @@ def apply_state(self, resolved_state: ResolvedSyncState) -> SyftSuccess | SyftEr action_object.reload_cache() # NOTE permissions are added separately server side action_object._send(self.id, self.verify_key, add_storage_permission=False) - action_object = action_object.as_empty() + action_object._clear_cache() ignored_batches = resolved_state.ignored_batches diff --git a/packages/syft/src/syft/service/sync/diff_state.py b/packages/syft/src/syft/service/sync/diff_state.py index 24d89af2fd6..8f243dd8bc3 100644 --- a/packages/syft/src/syft/service/sync/diff_state.py +++ b/packages/syft/src/syft/service/sync/diff_state.py @@ -363,7 +363,8 @@ def repr_attr_diffstatus_dict(self) -> dict: def repr_attr_dict(self, side: str) -> dict[str, Any]: obj = self.low_obj if side == "low" else self.high_obj if isinstance(obj, ActionObject): - return {"value": obj.syft_action_data_cache} + # Only safe for ActionObjects created by data owners + return {"value": obj.syft_action_data_repr_} repr_attrs = getattr(obj, "__repr_attrs__", []) res = {} for attr in repr_attrs: From 319bcc288b24aee832e768253d8fd4a019f42d68 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Fri, 21 Jun 2024 13:14:19 +0200 Subject: [PATCH 246/313] fix lint --- notebooks/Bigquery full flow.ipynb | 206 +- notebooks/auto_sync.ipynb | 3584 ----------------- packages/syft/src/syft/__init__.py | 4 +- .../src/syft/service/action/action_service.py | 14 +- packages/syft/src/syft/service/api/api.py | 6 +- .../syft/src/syft/service/code/user_code.py | 2 +- .../syft/src/syft/service/policy/policy.py | 37 +- .../syft/src/syft/service/request/request.py | 6 +- .../syft/service/request/request_service.py | 3 +- .../syft/src/syft/store/document_store.py | 2 - 10 files changed, 63 insertions(+), 3801 deletions(-) delete mode 100644 notebooks/auto_sync.ipynb diff --git a/notebooks/Bigquery full flow.ipynb b/notebooks/Bigquery full flow.ipynb index 6337b4847e4..ff479dd2bdd 100644 --- a/notebooks/Bigquery full flow.ipynb +++ b/notebooks/Bigquery full flow.ipynb @@ -15,13 +15,15 @@ "metadata": {}, "outputs": [], "source": [ - "import time\n", + "# stdlib\n", "\n", + "# syft absolute\n", "import syft as sy\n", "from syft.client.domain_client import DomainClient\n", "from syft.client.syncing import compare_clients\n", "from syft.service.code.user_code import UserCode\n", - "from syft.service.job.job_stash import Job, JobStatus\n", + "from syft.service.job.job_stash import Job\n", + "from syft.service.job.job_stash import JobStatus\n", "from syft.service.request.request import Request\n", "from syft.service.sync.diff_state import ObjectDiffBatch\n", "\n", @@ -139,7 +141,7 @@ " print(\"Starting auto sync\")\n", " sync_and_execute_new_requests(client_low, client_high)\n", " sync_finished_jobs(client_low, client_high)\n", - " print(\"Finished auto sync\")\n" + " print(\"Finished auto sync\")" ] }, { @@ -157,7 
+159,7 @@ "metadata": {}, "outputs": [], "source": [ - "from google.cloud import bigquery\n", + "# third party\n", "from google.oauth2 import service_account" ] }, @@ -294,9 +296,10 @@ "metadata": {}, "outputs": [], "source": [ + "# stdlib\n", "import json\n", "\n", - "with open(\"./credentials.json\", \"r\") as f:\n", + "with open(\"./credentials.json\") as f:\n", " BQ_CREDENTIALS = json.loads(f.read())" ] }, @@ -365,8 +368,9 @@ " sql_query: str,\n", ") -> str:\n", " # third party\n", + "\n", + " # third party\n", " from google.cloud import bigquery\n", - " from google.oauth2 import service_account\n", "\n", " # syft absolute\n", " from syft.service.response import SyftError\n", @@ -506,6 +510,7 @@ " func_name: str,\n", " query: str,\n", ") -> str:\n", + " # syft absolute\n", " import syft as sy\n", "\n", " if not func_name.isalpha():\n", @@ -818,9 +823,17 @@ } ], "source": [ - "# do forever\n", - "for i in range(4):\n", - " auto_sync(client_low, client_high)" + "# stdlib\n", + "import time\n", + "\n", + "# sync every 5 seconds\n", + "\n", + "for _ in range(5):\n", + " try:\n", + " auto_sync(client_low, client_high)\n", + " except Exception as e:\n", + " print(e)\n", + " time.sleep(5)" ] }, { @@ -1046,181 +1059,6 @@ "metadata": {}, "outputs": [], "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9a12184b-9bc0-438f-a55f-2efa2ed6c93a", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "bfebf0d6-3e1e-445a-883c-3c95dba0a558", - "metadata": {}, - "outputs": [], - "source": [ - "# @sy.syft_function(input_policy=sy.MixedInputPolicy(endpoint=client_high.api.services.reddit.query, client=client_high))\n", - "# def execute_query(endpoint):\n", - "# res = endpoint(sql_query=\"SELECT * from data_10gb.comments LIMIT 40\");\n", - "# return res" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "8ecee14c-fa09-426c-a0f5-4a5e9904e3d7", - "metadata": {}, - "outputs": [], - "source": [ - "# client_high.code.submit(execute_query)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "bf902d3b-4bfc-470f-9b47-b7a195e45fe4", - "metadata": {}, - "outputs": [], - "source": [ - "# res = client_high.code.execute_query(endpoint=client_high.api.services.reddit.query)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "abfd38f6-1dfb-4a36-83b3-74bbbf03cfbe", - "metadata": {}, - "outputs": [], - "source": [ - "# res" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "8945ad4a-2106-45a9-b6eb-dfe9b35c3b0a", - "metadata": {}, - "outputs": [], - "source": [ - "# @sy.syft_function(input_policy=sy.ExactMatch(endpoint=client_high.api.services.reddit.query))\n", - "# def execute_query(endpoint):\n", - "# res = endpoint(sql_query=\"SELECT * from data_10gb.comments LIMIT 40\");\n", - "# return res" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "6c99de9f-7aed-430f-a81b-7215d4fe9270", - "metadata": {}, - "outputs": [], - "source": [ - "# client_high.code.submit(abc)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "e1759c17-e1e4-4061-9fe2-0ed757fb7a97", - "metadata": {}, - "outputs": [], - "source": [ - "# res = client_high.code.abc(endpoint=client_high.api.services.reddit.query)" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "520377d1-5774-43cd-bb4c-35bd925fdb89", - "metadata": {}, - "outputs": [], - "source": [ - "# res.syft_action_data.head()" - ] - }, - { - "cell_type": "code", - 
"execution_count": 19, - "id": "e71aa8c6-d827-485e-bcf2-218f634346fc", - "metadata": {}, - "outputs": [], - "source": [ - "# @sy.api_endpoint(path=\"reddit.submit_query\")\n", - "# def submit_query(\n", - "# context, func_name: str, query: str,\n", - "# ) -> str:\n", - "# import syft as sy\n", - "\n", - "# if not func_name.isalpha():\n", - "# return sy.SyftError(message=\"Please only use alphabetic characters for your func_name\")\n", - "\n", - "# @sy.syft_function(name=func_name, input_policy=sy.MixedInputPolicy(query=sy.Constant(val=query),client=context.admin_client)\n", - "# def execute_query(query: str):\n", - "\n", - "# return f\"your query {query} was EXECUTED\"\n", - "\n", - "# res = context.user_client.code.request_code_execution(execute_query)\n", - "\n", - "# return f\"Query submitted {res}, use `client.code.{func_name}()` to run your query\"" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "bc8c4bb4-f41f-4344-8817-41c4e6780912", - "metadata": {}, - "outputs": [], - "source": [ - "# sy.api_endpoint(path=\"reddit.query\"" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "c6a426e8-314a-4a44-8bbe-47f8f8663835", - "metadata": {}, - "outputs": [], - "source": [ - "# for _ in range(10):\n", - "# j = client_high.code.query_0(blocking=False)\n", - "# print(j)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "985075a7-833b-4e4e-8441-1bc34c397148", - "metadata": {}, - "outputs": [], - "source": [ - "# def make_request(client):\n", - "# existing_requests = client.requests.get_all()\n", - "\n", - "# @sy.syft_function_single_use()\n", - "# def func():\n", - "# return 10\n", - "\n", - "# func.func_name = f\"query_{len(existing_requests)}\"\n", - "# func.code = func.code.replace(\"def func(\", f\"def {func.func_name}(\")\n", - "\n", - "# res = client.code.request_code_execution(func)\n", - "# return res\n", - "\n", - "\n", - "# for _ in range(5):\n", - "# make_request(client_low_ds)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8674ffae-4635-43bd-9117-0959cd0040ba", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/notebooks/auto_sync.ipynb b/notebooks/auto_sync.ipynb deleted file mode 100644 index f69f49e0e64..00000000000 --- a/notebooks/auto_sync.ipynb +++ /dev/null @@ -1,3584 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "2e4548e0-20ce-472d-aa0f-e1c29952b694", - "metadata": {}, - "outputs": [], - "source": [ - "import time\n", - "\n", - "import syft as sy\n", - "from syft.client.domain_client import DomainClient\n", - "from syft.client.syncing import compare_clients\n", - "from syft.service.code.user_code import UserCode\n", - "from syft.service.job.job_stash import Job, JobStatus\n", - "from syft.service.request.request import Request\n", - "from syft.service.sync.diff_state import ObjectDiffBatch\n", - "\n", - "\n", - "def is_request_to_sync(batch: ObjectDiffBatch) -> bool:\n", - " # True if this is a new low-side request\n", - " # TODO add condition for sql requests/usercodes\n", - " low_request = batch.root.low_obj\n", - " return isinstance(low_request, Request) and batch.status == \"NEW\"\n", - "\n", - "\n", - "def is_job_to_sync(batch: ObjectDiffBatch):\n", - " # True if this is a new high-side job that is either COMPLETED or ERRORED\n", - " if batch.status != \"NEW\":\n", - " return False\n", - " if not isinstance(batch.root.high_obj, Job):\n", - " return False\n", - " job = batch.root.high_obj\n", - " return job.status in 
(JobStatus.ERRORED, JobStatus.COMPLETED)\n", - "\n", - "\n", - "def sync_new_requests(\n", - " client_low: DomainClient,\n", - " client_high: DomainClient,\n", - ") -> dict[sy.UID, sy.SyftSuccess | sy.SyftError] | sy.SyftError:\n", - " sync_request_results = {}\n", - " diff = compare_clients(\n", - " from_client=client_low, to_client=client_high, include_types=[\"request\"]\n", - " )\n", - " if isinstance(diff, sy.SyftError):\n", - " print(diff)\n", - " return sync_request_results\n", - " for batch in diff.batches:\n", - " if is_request_to_sync(batch):\n", - " request_id = batch.root.low_obj.id\n", - " w = batch.resolve()\n", - " result = w.click_sync()\n", - " sync_request_results[request_id] = result\n", - " return sync_request_results\n", - "\n", - "\n", - "def execute_requests(\n", - " client_high: DomainClient, request_ids: list[sy.UID]\n", - ") -> dict[sy.UID, Job]:\n", - " jobs_by_request_id = {}\n", - " for request_id in request_ids:\n", - " request = client_high.requests.get_by_uid(request_id)\n", - " if not isinstance(request, Request):\n", - " continue\n", - "\n", - " code = request.code\n", - " if not isinstance(code, UserCode):\n", - " continue\n", - "\n", - " func_name = request.code.service_func_name\n", - " api_func = getattr(client_high.code, func_name, None)\n", - " if api_func is None:\n", - " continue\n", - "\n", - " job = api_func(blocking=False)\n", - " jobs_by_request_id[request_id] = job\n", - " # sleep to prevent SQLite connection pool issues\n", - " time.sleep(1)\n", - "\n", - " return jobs_by_request_id\n", - "\n", - "\n", - "def sync_and_execute_new_requests(\n", - " client_low: DomainClient, client_high: DomainClient\n", - ") -> None:\n", - " sync_results = sync_new_requests(client_low, client_high)\n", - " if isinstance(sync_results, sy.SyftError):\n", - " print(sync_results)\n", - " return\n", - "\n", - " request_ids = [\n", - " uid for uid, res in sync_results.items() if isinstance(res, sy.SyftSuccess)\n", - " ]\n", - " print(f\"Synced {len(request_ids)} new requests\")\n", - "\n", - " jobs_by_request = execute_requests(client_high, request_ids)\n", - " print(f\"Started {len(jobs_by_request)} new jobs\")\n", - "\n", - "\n", - "def sync_finished_jobs(\n", - " client_low: DomainClient,\n", - " client_high: DomainClient,\n", - ") -> dict[sy.UID, sy.SyftError | sy.SyftSuccess] | sy.SyftError:\n", - " sync_job_results = {}\n", - " diff = compare_clients(\n", - " from_client=client_high, to_client=client_low, include_types=[\"job\"]\n", - " )\n", - " if isinstance(diff, sy.SyftError):\n", - " print(diff)\n", - " return diff\n", - "\n", - " for batch in diff.batches:\n", - " if is_job_to_sync(batch):\n", - " batch_id = batch.root.high_obj.id\n", - " w = batch.resolve()\n", - " share_result = w.click_share_all_private_data()\n", - " if isinstance(share_result, sy.SyftError):\n", - " sync_job_results[batch_id] = share_result\n", - " continue\n", - " sync_result = w.click_sync()\n", - " sync_job_results[batch_id] = sync_result\n", - "\n", - " print(f\"Sharing {len(sync_job_results)} new results\")\n", - " return sync_job_results\n", - "\n", - "\n", - "def auto_sync(client_low: DomainClient, client_high: DomainClient) -> None:\n", - " print(\"Starting auto sync\")\n", - " sync_and_execute_new_requests(client_low, client_high)\n", - " sync_finished_jobs(client_low, client_high)\n", - " print(\"Finished auto sync\")\n", - "\n", - "\n", - "def auto_sync_loop(\n", - " client_low: DomainClient, client_high: DomainClient, sleep_seconds: int = 60\n", - ") -> None:\n", - " 
while True:\n", - " auto_sync(client_low, client_high)\n", - " time.sleep(sleep_seconds)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "197db367-bc49-4a41-ba94-756e8b8b4bf4", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Staging Protocol Changes...\n", - "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/8fb2b26c3b5d4db2a0cb775fe2a3d825/db/8fb2b26c3b5d4db2a0cb775fe2a3d825.sqlite\n", - "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/8fb2b26c3b5d4db2a0cb775fe2a3d825/db/8fb2b26c3b5d4db2a0cb775fe2a3d825.sqlite\n", - "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=0 image_uid=ecdf0b0b455f423a9c5c84b1f51be2b2 in_memory=True\n", - "Created default worker pool.\n", - "Data Migrated to latest version !!!\n", - "Staging Protocol Changes...\n", - "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", - "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", - "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=4 image_uid=7e1a9ab048964c71bda02800733738e6 in_memory=True\n", - "Created default worker pool.\n", - "Data Migrated to latest version !!!\n", - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning:
You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning:
You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`.

" - ], - "text/plain": [ - "SyftWarning: You are using a default password. Please change the password using `[your_client].me.set_password([new_password])`." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as \n" - ] - } - ], - "source": [ - "low_side = sy.orchestra.launch(\n", - " name=\"low-side\",\n", - " node_side_type=\"low\",\n", - " local_db=True,\n", - " reset=True,\n", - " dev_mode=True,\n", - ")\n", - "\n", - "high_side = sy.orchestra.launch(\n", - " name=\"high-side\",\n", - " node_side_type=\"high\",\n", - " local_db=True,\n", - " reset=True,\n", - " n_consumers=4,\n", - " create_producer=True,\n", - " dev_mode=True,\n", - ")\n", - "\n", - "client_high = high_side.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "client_low = low_side.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "client_low.register(\n", - " email=\"newuser@openmined.org\", name=\"John Doe\", password=\"pw\", password_verify=\"pw\"\n", - ")\n", - "client_low_ds = low_side.login(email=\"newuser@openmined.org\", password=\"pw\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "ba5648cc-db15-4a07-bbbe-76bef5b270c8", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
SyftSuccess:
Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
SyftSuccess:
Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
SyftSuccess:
Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
SyftSuccess:
Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
SyftSuccess:
Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" - ], - "text/plain": [ - "SyftSuccess: Syft function 'func' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "def make_request(client):\n", - " existing_requests = client.requests.get_all()\n", - "\n", - " @sy.syft_function_single_use()\n", - " def func():\n", - " return 10\n", - "\n", - " func.func_name = f\"query_{len(existing_requests)}\"\n", - " func.code = func.code.replace(\"def func(\", f\"def {func.func_name}(\")\n", - "\n", - " res = client.code.request_code_execution(func)\n", - " return res\n", - "\n", - "\n", - "for _ in range(5):\n", - " make_request(client_low_ds)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "69d53f0f-0c49-460e-bf13-e3f53415193a", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting auto sync\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" - ], - "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Decision: Syncing 2 objects\n", - "Decision: Syncing 2 objects\n", - "Decision: Syncing 2 objects\n", - "Decision: Syncing 2 objects\n", - "Decision: Syncing 2 objects\n", - "Synced 5 new requests\n", - "START Setting Job 0588393eb0bc4e74a788cc48780a60e2, thread 8023429120\n", - "END Setting Job 0588393eb0bc4e74a788cc48780a60e2, thread 8023429120\n", - "START Updating Job 0588393eb0bc4e74a788cc48780a60e2, thread 11293306880\n", - "END Updating Job 0588393eb0bc4e74a788cc48780a60e2, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 0588393eb0bc4e74a788cc48780a60e2 found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 0588393eb0bc4e74a788cc48780a60e2, thread 11633979392\n", - "END Updating Job 0588393eb0bc4e74a788cc48780a60e2, thread 11633979392, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 0588393eb0bc4e74a788cc48780a60e2 found: Ok(syft.service.job.job_stash.Job)\n", - "START Setting Job c586ffdd174d441eb7c1ca23633629b1, thread 8023429120\n", - "END Setting Job c586ffdd174d441eb7c1ca23633629b1, thread 8023429120\n", - "START Setting Job f45877b68917412bb85b806e179871ed, thread 8023429120\n", - "END Setting Job f45877b68917412bb85b806e179871ed, thread 8023429120\n", - "START Updating Job c586ffdd174d441eb7c1ca23633629b1, thread 11275333632\n", - "END Updating Job c586ffdd174d441eb7c1ca23633629b1, thread 11275333632, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job c586ffdd174d441eb7c1ca23633629b1 found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job f45877b68917412bb85b806e179871ed, thread 11311280128\n", - "END Updating Job f45877b68917412bb85b806e179871ed, thread 11311280128, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job f45877b68917412bb85b806e179871ed found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job c586ffdd174d441eb7c1ca23633629b1, thread 11668025344\n", - "END Updating Job c586ffdd174d441eb7c1ca23633629b1, thread 11668025344, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job c586ffdd174d441eb7c1ca23633629b1 found: Ok(syft.service.job.job_stash.Job)\n", - "START Setting Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 8023429120\n", - "END Setting Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 8023429120\n", - "START Updating Job f45877b68917412bb85b806e179871ed, thread 11827965952\n", - "END Updating Job f45877b68917412bb85b806e179871ed, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job f45877b68917412bb85b806e179871ed found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 11511902208\n", - "END Updating Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 11511902208, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job cc2aaa7f2c7542a69e3fac248d5acd8d found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 11827965952\n", - "END Updating Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: 
syft.service.job.job_stash.Job\n", - "Job cc2aaa7f2c7542a69e3fac248d5acd8d found: Ok(syft.service.job.job_stash.Job)\n", - "START Setting Job 0537c2e924334bedbd2b641625905cc4, thread 8023429120\n", - "END Setting Job 0537c2e924334bedbd2b641625905cc4, thread 8023429120\n", - "START Updating Job 0537c2e924334bedbd2b641625905cc4, thread 11293306880\n", - "END Updating Job 0537c2e924334bedbd2b641625905cc4, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 0537c2e924334bedbd2b641625905cc4 found: Ok(syft.service.job.job_stash.Job)\n", - "Started 5 new jobs\n" - ] - }, - { - "data": { - "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" - ], - "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "START Updating Job 0537c2e924334bedbd2b641625905cc4, thread 11639091200\n", - "END Updating Job 0537c2e924334bedbd2b641625905cc4, thread 11639091200, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 0537c2e924334bedbd2b641625905cc4 found: Ok(syft.service.job.job_stash.Job)\n", - "Decision: Syncing 5 objects\n", - "START Setting Job f45877b68917412bb85b806e179871ed, thread 8023429120\n", - "END Setting Job f45877b68917412bb85b806e179871ed, thread 8023429120\n", - "Decision: Syncing 5 objects\n", - "START Setting Job 0588393eb0bc4e74a788cc48780a60e2, thread 8023429120\n", - "END Setting Job 0588393eb0bc4e74a788cc48780a60e2, thread 8023429120\n", - "Decision: Syncing 5 objects\n", - "START Setting Job c586ffdd174d441eb7c1ca23633629b1, thread 8023429120\n", - "END Setting Job c586ffdd174d441eb7c1ca23633629b1, thread 8023429120\n", - "Decision: Syncing 5 objects\n", - "START Setting Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 8023429120\n", - "END Setting Job cc2aaa7f2c7542a69e3fac248d5acd8d, thread 8023429120\n", - "Sharing 4 new results\n", - "Finished auto sync\n" - ] - } - ], - "source": [ - "auto_sync(client_low, client_high)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "c6a426e8-314a-4a44-8bbe-47f8f8663835", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "START Setting Job 504dfa7a0309408dad11188df0a267e9, thread 8023429120\n", - "END Setting Job 504dfa7a0309408dad11188df0a267e9, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job c9d754572ff14cc285c01b4b4e8bb86e, thread 8023429120\n", - "END Setting Job c9d754572ff14cc285c01b4b4e8bb86e, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job 449c70aaecc4479f92835cf633985815, thread 8023429120\n", - "END Setting Job 449c70aaecc4479f92835cf633985815, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job da0238a7f7794ef79bc49b987cf2f22d, thread 8023429120\n", - "END Setting Job da0238a7f7794ef79bc49b987cf2f22d, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job 099e40c538e44232969b439589226e10, thread 8023429120\n", - "END Setting Job 099e40c538e44232969b439589226e10, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job 011a06deaf05409983cc924cfc13f8fe, thread 8023429120\n", - "END Setting Job 011a06deaf05409983cc924cfc13f8fe, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job dcfa4476caa44186a2553d40e4a7bead, thread 8023429120\n", - "END Setting Job dcfa4476caa44186a2553d40e4a7bead, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job 15cd03cde93d4c92b6347a452cde32ac, thread 8023429120\n", - "END Setting Job 15cd03cde93d4c92b6347a452cde32ac, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job e05894266e6445259f7515e8441751a7, thread 8023429120\n", - "END Setting Job e05894266e6445259f7515e8441751a7, thread 8023429120\n", - "syft.service.job.job_stash.Job\n", - "START Setting Job 3da0ad0017134db490b0b3f24d78e56f, thread 8023429120\n", - "END Setting Job 3da0ad0017134db490b0b3f24d78e56f, thread 8023429120\n", - 
"syft.service.job.job_stash.Job\n", - "START Updating Job da0238a7f7794ef79bc49b987cf2f22d, thread 11311280128\n", - "END Updating Job da0238a7f7794ef79bc49b987cf2f22d, thread 11311280128, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job da0238a7f7794ef79bc49b987cf2f22d found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 504dfa7a0309408dad11188df0a267e9, thread 11511902208\n", - "START Updating Job 099e40c538e44232969b439589226e10, thread 11293306880\n", - "START Updating Job dcfa4476caa44186a2553d40e4a7bead, thread 11275333632\n", - "END Updating Job 099e40c538e44232969b439589226e10, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 099e40c538e44232969b439589226e10 found: Ok(None)\n", - "END Updating Job 504dfa7a0309408dad11188df0a267e9, thread 11511902208, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 504dfa7a0309408dad11188df0a267e9 found: Ok(syft.service.job.job_stash.Job)\n", - "END Updating Job dcfa4476caa44186a2553d40e4a7bead, thread 11275333632, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job dcfa4476caa44186a2553d40e4a7bead found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job da0238a7f7794ef79bc49b987cf2f22d, thread 11827965952\n", - "START Updating Job 504dfa7a0309408dad11188df0a267e9, thread 11878445056\n", - "START Updating Job 099e40c538e44232969b439589226e10, thread 11861618688\n", - "END Updating Job da0238a7f7794ef79bc49b987cf2f22d, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job da0238a7f7794ef79bc49b987cf2f22d found: Ok(syft.service.job.job_stash.Job)\n", - "END Updating Job 099e40c538e44232969b439589226e10, thread 11861618688, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 099e40c538e44232969b439589226e10 found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job dcfa4476caa44186a2553d40e4a7bead, thread 11895271424\n", - "END Updating Job 504dfa7a0309408dad11188df0a267e9, thread 11878445056, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 504dfa7a0309408dad11188df0a267e9 found: Ok(syft.service.job.job_stash.Job)\n", - "END Updating Job dcfa4476caa44186a2553d40e4a7bead, thread 11895271424, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job dcfa4476caa44186a2553d40e4a7bead found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 011a06deaf05409983cc924cfc13f8fe, thread 11311280128\n", - "END Updating Job 011a06deaf05409983cc924cfc13f8fe, thread 11311280128, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 011a06deaf05409983cc924cfc13f8fe found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 15cd03cde93d4c92b6347a452cde32ac, thread 11511902208\n", - "START Updating Job 449c70aaecc4479f92835cf633985815, thread 11293306880\n", - "END Updating Job 15cd03cde93d4c92b6347a452cde32ac, thread 11511902208, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 15cd03cde93d4c92b6347a452cde32ac found: Ok(syft.service.job.job_stash.Job)\n", - "END Updating Job 449c70aaecc4479f92835cf633985815, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 449c70aaecc4479f92835cf633985815 found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 
c9d754572ff14cc285c01b4b4e8bb86e, thread 11275333632\n", - "END Updating Job c9d754572ff14cc285c01b4b4e8bb86e, thread 11275333632, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job c9d754572ff14cc285c01b4b4e8bb86e found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 011a06deaf05409983cc924cfc13f8fe, thread 11827965952\n", - "END Updating Job 011a06deaf05409983cc924cfc13f8fe, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 011a06deaf05409983cc924cfc13f8fe found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 15cd03cde93d4c92b6347a452cde32ac, thread 11861618688\n", - "START Updating Job 449c70aaecc4479f92835cf633985815, thread 11878445056\n", - "END Updating Job 15cd03cde93d4c92b6347a452cde32ac, thread 11861618688, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 15cd03cde93d4c92b6347a452cde32ac found: Ok(syft.service.job.job_stash.Job)\n", - "END Updating Job 449c70aaecc4479f92835cf633985815, thread 11878445056, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 449c70aaecc4479f92835cf633985815 found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job e05894266e6445259f7515e8441751a7, thread 11311280128\n", - "START Updating Job c9d754572ff14cc285c01b4b4e8bb86e, thread 11895271424\n", - "END Updating Job e05894266e6445259f7515e8441751a7, thread 11311280128, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job e05894266e6445259f7515e8441751a7 found: Ok(syft.service.job.job_stash.Job)\n", - "END Updating Job c9d754572ff14cc285c01b4b4e8bb86e, thread 11895271424, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job c9d754572ff14cc285c01b4b4e8bb86e found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 3da0ad0017134db490b0b3f24d78e56f, thread 11293306880\n", - "END Updating Job 3da0ad0017134db490b0b3f24d78e56f, thread 11293306880, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 3da0ad0017134db490b0b3f24d78e56f found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job e05894266e6445259f7515e8441751a7, thread 11827965952\n", - "END Updating Job e05894266e6445259f7515e8441751a7, thread 11827965952, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job e05894266e6445259f7515e8441751a7 found: Ok(syft.service.job.job_stash.Job)\n", - "START Updating Job 3da0ad0017134db490b0b3f24d78e56f, thread 11861618688\n", - "END Updating Job 3da0ad0017134db490b0b3f24d78e56f, thread 11861618688, res: Ok(syft.service.job.job_stash.Job), obj: syft.service.job.job_stash.Job\n", - "Job 3da0ad0017134db490b0b3f24d78e56f found: Ok(syft.service.job.job_stash.Job)\n" - ] - } - ], - "source": [ - "for _ in range(10):\n", - " j = client_high.code.query_0(blocking=False)\n", - " print(j)" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "id": "2eb8c549-ddb8-4970-bf36-2c6bb77b3eb4", - "metadata": {}, - "outputs": [ - { - "ename": "TypeError", - "evalue": "BaseStash.query_one() got multiple values for argument 'credentials'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[30], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m 
\u001b[43mhigh_side\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpython_node\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mjob_stash\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery_one\u001b[49m\u001b[43m(\u001b[49m\u001b[43msy\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mUID\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m72a80389d09043e087aa5be880df38e8\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcredentials\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mclient_high\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mverify_key\u001b[49m\u001b[43m)\u001b[49m\n", - "\u001b[0;31mTypeError\u001b[0m: BaseStash.query_one() got multiple values for argument 'credentials'" - ] - } - ], - "source": [ - "high_side.python_node.job_stash.query_one(sy.UID(\"72a80389d09043e087aa5be880df38e8\"), credentials=client_high.verify_key)" - ] - }, - { - "cell_type": "code", - "execution_count": 36, - "id": "e96839aa-dfcc-4933-967a-7e32cac1b9ef", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Ok(None)" - ] - }, - "execution_count": 36, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from syft.store.document_store import QueryKeys, UIDPartitionKey\n", - "\n", - "job_stash = high_side.python_node.job_stash\n", - "credentials = client_high.verify_key\n", - "uid = sy.UID(\"72a80389d09043e087aa5be880df38e8\")\n", - "qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)])\n", - "r = job_stash.query_one(credentials=credentials, qks=qks)\n", - "\n", - "r" - ] - }, - { - "cell_type": "code", - "execution_count": 59, - "id": "2b3d4a11-a50d-41d2-acc9-cd171b1e757a", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{: ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : ,\n", - " : }" - ] - }, - "execution_count": 59, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job_stash.partition.unique_keys[\"id\"]" - ] - }, - { - "cell_type": "code", - "execution_count": 47, - "id": "d3446d13-35f0-46c9-8914-414095998c49", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "> \u001b[0;32m/Users/eelco/.pyenv/versions/3.10.13/lib/python3.10/uuid.py\u001b[0m(177)\u001b[0;36m__init__\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 175 \u001b[0;31m \u001b[0mhex\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhex\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'{}'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreplace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'-'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m''\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 176 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhex\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;36m32\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m--> 177 \u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'badly formed hexadecimal UUID string'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 178 \u001b[0;31m \u001b[0mint\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mint_\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m16\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 179 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mbytes_le\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n" - ] - }, - { - "name": "stdin", - "output_type": "stream", - "text": [ - "ipdb> value\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "*** NameError: name 'value' is not defined\n" - ] - }, - { - "name": "stdin", - "output_type": "stream", - "text": [ - "ipdb> u\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "> \u001b[0;32m/Users/eelco/dev/PySyft/packages/syft/src/syft/types/uid.py\u001b[0m(71)\u001b[0;36m__init__\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 69 \u001b[0;31m \u001b[0;31m# if value is not set - create a novel and unique ID.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 70 \u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m---> 71 \u001b[0;31m \u001b[0mvalue\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0muuid\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mUUID\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mversion\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 72 \u001b[0;31m \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbytes\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 73 \u001b[0;31m \u001b[0mvalue\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0muuid\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mUUID\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbytes\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mversion\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n" - ] - }, - { - "name": "stdin", - "output_type": "stream", - "text": [ - "ipdb> value\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "'id'\n" - ] - }, - { - "name": "stdin", - "output_type": "stream", - "text": [ - "ipdb> q\n" - ] - } - ], - "source": [ - "%debug" - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "id": "7b2fce32-ec29-4b31-b21b-4255435f3a81", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 43, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job_stash.partition.matches_unique_cks(qks.all[0].partition_key)" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - "id": "f7c560cd-a392-4410-8d2f-7a0f83097bb5", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\u001b[0;31mSignature:\u001b[0m\n", - 
"\u001b[0mhigh_side\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpython_node\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjob_stash\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mquery_one\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0mcredentials\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m'SyftVerifyKey'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0mqks\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m'QueryKey | QueryKeys'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m \u001b[0morder_by\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m'PartitionKey | None'\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\n", - "\u001b[0;34m\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0;34m'Result[BaseStash.object_type | None, str]'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mDocstring:\u001b[0m \n", - "\u001b[0;31mFile:\u001b[0m ~/dev/PySyft/packages/syft/src/syft/store/document_store.py\n", - "\u001b[0;31mType:\u001b[0m method" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "high_side.python_node.job_stash.query_one?" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "b6f683f0-a333-40a7-93b4-47b12b51f261", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
high-side/jobs/
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - " JOB\n", - "
\n", - "\n", - " query_0\n", - "
\n", - " \n", - " \n", - "
\n", - " \n", - " #72a80389d09043e087aa5be880df38e8\n", - " \n", - " \n", - " \n", - "\n", - "
\n", - " \n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " UserCode:\n", - " query_0\n", - "
\n", - "
\n", - " Status:\n", - " Processing\n", - "
\n", - "
\n", - " \n", - " Started At:\n", - " 2024-06-19 15:59:41.34862 by Jane Doe info@openmined.org\n", - "
\n", - "
\n", - " \n", - " Updated At:\n", - " 2024-06-19 1\n", - "
\n", - " \n", - "
\n", - " \n", - " Worker Pool:\n", - " default-pool-3 on worker \n", - " \n", - "
\n", - " \n", - " #default-pool\n", - " \n", - " \n", - " \n", - "\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " Subjobs:\n", - " 0\n", - "
\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " \n", - " \n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " syft.service.action.action_data_empty.ObjectNotReady\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n",
-       "        Message\n",
-       "\n",
-       "    
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - "\n" - ], - "text/markdown": [ - "```python\n", - "class Job:\n", - " id: UID = 72a80389d09043e087aa5be880df38e8\n", - " status: processing\n", - " has_parent: False\n", - " result: syft.service.action.action_data_empty.ObjectNotReady\n", - " logs:\n", - "\n", - "0 \n", - " \n", - "```" - ], - "text/plain": [ - "syft.service.job.job_stash.Job" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "p.data[sy.UID(\"72a80389d09043e087aa5be880df38e8\")]" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "id": "adfcbb52-d8d1-4bda-9aab-d1218b14ed5b", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
high-side/jobs/
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - " JOB\n", - "
\n", - "\n", - " query_0\n", - "
\n", - " \n", - " \n", - "
\n", - " \n", - " #72a80389d09043e087aa5be880df38e8\n", - " \n", - " \n", - " \n", - "\n", - "
\n", - " \n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " UserCode:\n", - " query_0\n", - "
\n", - "
\n", - " Status:\n", - " Processing\n", - "
\n", - "
\n", - " \n", - " Started At:\n", - " 2024-06-19 15:59:41.34862 by Jane Doe info@openmined.org\n", - "
\n", - "
\n", - " \n", - " Updated At:\n", - " 2024-06-19 1\n", - "
\n", - " \n", - "
\n", - " \n", - " Worker Pool:\n", - " default-pool-3 on worker \n", - " \n", - "
\n", - " \n", - " #default-pool\n", - " \n", - " \n", - " \n", - "\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " Subjobs:\n", - " 0\n", - "
\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " \n", - " \n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " syft.service.action.action_data_empty.ObjectNotReady\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n",
-       "        Message\n",
-       "\n",
-       "    
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - "\n" - ], - "text/markdown": [ - "```python\n", - "class Job:\n", - " id: UID = 72a80389d09043e087aa5be880df38e8\n", - " status: processing\n", - " has_parent: False\n", - " result: syft.service.action.action_data_empty.ObjectNotReady\n", - " logs:\n", - "\n", - "0 \n", - " \n", - "```" - ], - "text/plain": [ - "syft.service.job.job_stash.Job" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "p._get(sy.UID(\"72a80389d09043e087aa5be880df38e8\"), credentials=client_high.verify_key).ok()" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "id": "c98ae422-6997-4e30-aedd-435bb226fac6", - "metadata": {}, - "outputs": [ - { - "ename": "AttributeError", - "evalue": "'SQLiteStorePartition' object has no attribute 'query_one'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[26], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery_one\u001b[49m\n", - "\u001b[0;31mAttributeError\u001b[0m: 'SQLiteStorePartition' object has no attribute 'query_one'" - ] - } - ], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 25, - "id": "bc107bb2-5f1c-4cd4-8a21-2d87b838257d", - "metadata": {}, - "outputs": [], - "source": [ - "client_high.services.job.get(sy.UID(\"72a80389d09043e087aa5be880df38e8\"))" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "id": "cef90608-9470-468d-8f14-3d8304b716ca", - "metadata": {}, - "outputs": [], - "source": [ - "client_high.services.job.get(sy.UID(\"830b9e1fd2bf4bf9b526264e3468f97c\"))" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "86888bee-24c1-40fe-97a7-98e20a0a5783", - "metadata": {}, - "outputs": [], - "source": [ - "import sqlite3\n", - "\n", - "conn = sqlite3.connect(database=\"x\")" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "8724a8dc-14b0-40ac-9abf-26b9095b7a2c", - "metadata": {}, - "outputs": [], - "source": [ - "cursor = conn.cursor()" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "55392717-84cc-4bea-81ad-10b74f8a938a", - "metadata": {}, - "outputs": [], - "source": [ - "conn.close()" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "0671600b-9f8c-402a-a932-43fa449cc3e1", - "metadata": {}, - "outputs": [ - { - "ename": "ProgrammingError", - "evalue": "Cannot operate on a closed database.", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mProgrammingError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[12], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mcursor\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexecute\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mx\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n", - "\u001b[0;31mProgrammingError\u001b[0m: Cannot operate on a closed database." 
- ] - } - ], - "source": [ - "cursor.execute(\"x\")" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "1c3eb8fc-506e-4160-bdb0-97398675c9f4", - "metadata": {}, - "outputs": [ - { - "ename": "KeyError", - "evalue": "1", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[6], line 3\u001b[0m\n\u001b[1;32m 1\u001b[0m a \u001b[38;5;241m=\u001b[39m {}\n\u001b[0;32m----> 3\u001b[0m \u001b[38;5;28;01mdel\u001b[39;00m a[\u001b[38;5;241m1\u001b[39m]\n", - "\u001b[0;31mKeyError\u001b[0m: 1" - ] - } - ], - "source": [ - "a = {}\n", - "\n", - "del a[1]" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "8c0cfd89-eeb1-42c4-ab2e-26fb3eda95f8", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
high-side/jobs/
\n", - "
\n", - "
\n", - "
\n", - "
\n", - " \n", - " JOB\n", - "
\n", - "\n", - " query_4\n", - "
\n", - " \n", - " \n", - "
\n", - " \n", - " #80bc379a33294eeebd1937f303143386\n", - " \n", - " \n", - " \n", - "\n", - "
\n", - " \n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " UserCode:\n", - " query_4\n", - "
\n", - "
\n", - " Status:\n", - " Completed\n", - "
\n", - "
\n", - " \n", - " Started At:\n", - " 2024-06-19 15:18:54.33823 by Jane Doe info@openmined.org\n", - "
\n", - "
\n", - " \n", - " Updated At:\n", - " 2024-06-19 1\n", - "
\n", - " \n", - "
\n", - " \n", - " Worker Pool:\n", - " default-pool-1 on worker \n", - " \n", - "
\n", - " \n", - " #default-pool\n", - " \n", - " \n", - " \n", - "\n", - "
\n", - " \n", - "
\n", - " \n", - "
\n", - " Subjobs:\n", - " 0\n", - "
\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " \n", - " \n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - " 10\n", - "
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n",
-       "        Message\n",
-       "\n",
-       "    
\n", - "
\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "
\n", - "\n" - ], - "text/markdown": [ - "```python\n", - "class Job:\n", - " id: UID = 80bc379a33294eeebd1937f303143386\n", - " status: completed\n", - " has_parent: False\n", - " result: 10\n", - " logs:\n", - "\n", - "0 \n", - "JOB COMPLETED\n", - " \n", - "```" - ], - "text/plain": [ - "syft.service.job.job_stash.Job" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client_high.jobs[0]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "985075a7-833b-4e4e-8441-1bc34c397148", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index 4fa2909585a..69789efab80 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -61,10 +61,10 @@ from .service.dataset.dataset import CreateDataset as Dataset # noqa: F401 from .service.notification.notifications import NotificationStatus # noqa: F401 from .service.policy.policy import CreatePolicyRuleConstant as Constant # noqa: F401 -from .service.policy.policy import CustomInputPolicy +from .service.policy.policy import CustomInputPolicy # noqa: F401 from .service.policy.policy import CustomOutputPolicy # noqa: F401 from .service.policy.policy import ExactMatch # noqa: F401 -from .service.policy.policy import MixedInputPolicy +from .service.policy.policy import MixedInputPolicy # noqa: F401 from .service.policy.policy import SingleExecutionExactOutput # noqa: F401 from .service.policy.policy import UserInputPolicy # noqa: F401 from .service.policy.policy import UserOutputPolicy # noqa: F401 diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 4be2a3e2211..b765cd6a244 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -7,8 +7,6 @@ from result import Err from result import Ok from result import Result -from syft.service.action.action_endpoint import CustomEndpointActionObject -from syft.service.api.api import TwinAPIEndpoint # relative from ...node.credentials import SyftVerifyKey @@ -33,6 +31,7 @@ from ..user.user_roles import ADMIN_ROLE_LEVEL from ..user.user_roles import GUEST_ROLE_LEVEL from ..user.user_roles import ServiceRole +from .action_endpoint import CustomEndpointActionObject from .action_object import Action from .action_object import ActionObject from .action_object import ActionObjectPointer @@ -381,8 +380,9 @@ def _user_code_execute( filtered_kwargs = result.ok() if hasattr(input_policy, "transform_kwargs"): - filtered_kwargs_res = input_policy.transform_kwargs( - context, filtered_kwargs + filtered_kwargs_res = input_policy.transform_kwargs( # type: ignore + context, + filtered_kwargs, ) if filtered_kwargs_res.is_err(): return filtered_kwargs_res @@ -415,7 +415,7 @@ def _user_code_execute( exec_result.result, update_policy=not override_execution_permission, ) - code_item.output_policy = output_policy + code_item.output_policy = output_policy # type: 
ignore user_code_service.update_code_state(context, code_item) if isinstance(exec_result.result, ActionObject): result_action_object = ActionObject.link( @@ -437,7 +437,7 @@ def _user_code_execute( private_exec_result.result, update_policy=not override_execution_permission, ) - code_item.output_policy = output_policy + code_item.output_policy = output_policy # type: ignore user_code_service.update_code_state(context, code_item) result_action_object_private = wrap_result( result_id, private_exec_result.result @@ -1032,7 +1032,7 @@ def filter_twin_args(args: list[Any], twin_mode: TwinMode) -> Any: def filter_twin_kwargs( - kwargs: dict, twin_mode: TwinMode, allow_python_types=False + kwargs: dict, twin_mode: TwinMode, allow_python_types: bool = False ) -> Any: filtered = {} for k, v in kwargs.items(): diff --git a/packages/syft/src/syft/service/api/api.py b/packages/syft/src/syft/service/api/api.py index 0bd2c7fd579..6ef6ebbc7fc 100644 --- a/packages/syft/src/syft/service/api/api.py +++ b/packages/syft/src/syft/service/api/api.py @@ -17,10 +17,9 @@ from result import Err from result import Ok from result import Result -from syft.service.user.user_service import UserService # relative -from ...abstract_node import AbstractNode, NodeSideType +from ...abstract_node import AbstractNode from ...client.client import SyftClient from ...serde.serializable import serializable from ...serde.signature import signature_remove_context @@ -38,6 +37,7 @@ from ..context import AuthedServiceContext from ..response import SyftError from ..user.user import UserView +from ..user.user_service import UserService NOT_ACCESSIBLE_STRING = "N / A" @@ -238,7 +238,7 @@ def build_internal_context( state=self.state or {}, user=user, admin_client=admin_client, - user_client=user_client + user_client=user_client, ) def __call__(self, *args: Any, **kwargs: Any) -> Any: diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index bb6c999206b..153e9933b53 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -1141,7 +1141,7 @@ def syft_function_single_use( ) -def replace_func_name(src, new_func_name): +def replace_func_name(src: str, new_func_name: str) -> str: pattern = r"\bdef\s+(\w+)\s*\(" replacement = f"def {new_func_name}(" new_src = re.sub(pattern, replacement, src, count=1) diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index caa6b1da424..0e4c791d3ea 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -13,17 +13,17 @@ from io import StringIO import sys import types -from typing import Any, Type +from typing import Any from typing import ClassVar # third party from RestrictedPython import compile_restricted -from pydantic import field_validator, model_validator +from pydantic import field_validator +from pydantic import model_validator import requests from result import Err from result import Ok from result import Result -from syft.service.action.action_endpoint import CustomEndpointActionObject # relative from ...abstract_node import NodeType @@ -44,6 +44,7 @@ from ...types.twin_object import TwinObject from ...types.uid import UID from ...util.util import is_interpreter_jupyter +from ..action.action_endpoint import CustomEndpointActionObject from ..action.action_object import ActionObject from ..action.action_permissions import ActionObjectPermission from 
..action.action_permissions import ActionPermission @@ -211,9 +212,9 @@ class CreatePolicyRuleConstant(CreatePolicyRule): __version__ = SYFT_OBJECT_VERSION_1 val: Any - klass: None | Type = None + klass: None | type = None - @model_validator(mode='before') + @model_validator(mode="before") @classmethod def set_klass(cls, data: Any) -> Any: val = data["val"] @@ -221,7 +222,7 @@ def set_klass(cls, data: Any) -> Any: klass = CustomEndpointActionObject else: klass = type(val) - data["klass"]= klass + data["klass"] = klass return data @field_validator("val", mode="after") @@ -231,7 +232,7 @@ def idify_endpoints(cls, value: str) -> str: return value.custom_function_actionobject_id() return value - def to_policy_rule(self, kw): + def to_policy_rule(self, kw: Any) -> PolicyRule: return Constant(kw=kw, val=self.val, klass=self.klass) @@ -254,13 +255,15 @@ class Constant(PolicyRule): __version__ = SYFT_OBJECT_VERSION_1 val: Any - klass: Type + klass: type requires_input: bool = False - def is_met(self, context: AuthedServiceContext, *args, **kwargs) -> bool: + def is_met(self, context: AuthedServiceContext, *args: Any, **kwargs: Any) -> bool: return True - def transform_kwarg(self, context: AuthedServiceContext, val) -> Result[Any, str]: + def transform_kwarg( + self, context: AuthedServiceContext, val: Any + ) -> Result[Any, str]: if isinstance(self.val, UID): if issubclass(self.klass, CustomEndpointActionObject): res = context.node.get_service("actionservice").get(context, self.val) @@ -309,7 +312,7 @@ def is_met( ) -def user_code_arg2id(arg): +def user_code_arg2id(arg: Any) -> UID: if isinstance(arg, ActionObject): uid = arg.id elif isinstance(arg, TwinObject): @@ -410,7 +413,7 @@ class MixedInputPolicy(InputPolicy): kwarg_rules: dict[NodeIdentity, dict[str, PolicyRule]] def __init__( - self, init_kwargs=None, client=None, *args: Any, **kwargs: Any + self, init_kwargs: Any = None, client: Any = None, *args: Any, **kwargs: Any ) -> None: if init_kwargs is not None: kwarg_rules = init_kwargs @@ -437,7 +440,9 @@ def __init__( *args, kwarg_rules=kwarg_rules, init_kwargs=kwarg_rules, **kwargs ) - def transform_kwargs(self, context: AuthedServiceContext, kwargs: dict[str, Any]) -> dict[str, Any]: + def transform_kwargs( + self, context: AuthedServiceContext, kwargs: dict[str, Any] + ) -> dict[str, Any]: for _, rules in self.kwarg_rules.items(): for kw, rule in rules.items(): if hasattr(rule, "transform_kwarg"): @@ -448,7 +453,9 @@ def transform_kwargs(self, context: AuthedServiceContext, kwargs: dict[str, Any] kwargs[kw] = res_val.ok() return Ok(kwargs) - def find_node_identity(self, kwargs: dict[str, Any], client=None) -> NodeIdentity: + def find_node_identity( + self, kwargs: dict[str, Any], client: Any = None + ) -> NodeIdentity: if client is not None: return NodeIdentity.from_api(client.api) @@ -515,7 +522,7 @@ def filter_kwargs( ) rule_check_args = (actionobject,) else: - rule_check_args = () + rule_check_args = () # type: ignore # TODO actionobject = rule.value if not rule.is_met(context, *rule_check_args): diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 78630b561b1..f174ef269b9 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -10,7 +10,6 @@ from result import Err from result import Ok from result import Result -from syft.types.syft_migration import migrate from typing_extensions import Self # relative @@ -25,13 +24,16 @@ from ...serde.serialize import 
_serialize from ...store.linked_obj import LinkedObject from ...types.datetime import DateTime +from ...types.syft_migration import migrate from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...types.syft_object import SyftObject from ...types.syncable_object import SyncableSyftObject -from ...types.transforms import TransformContext, drop, make_set_default +from ...types.transforms import TransformContext from ...types.transforms import add_node_uid_for_key +from ...types.transforms import drop from ...types.transforms import generate_id +from ...types.transforms import make_set_default from ...types.transforms import transform from ...types.twin_object import TwinObject from ...types.uid import LineageID diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index 7edadc9a888..9d0bead7089 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -24,7 +24,8 @@ from ..service import TYPE_TO_SERVICE from ..service import service_method from ..user.user import UserView -from ..user.user_roles import ADMIN_ROLE_LEVEL, DATA_SCIENTIST_ROLE_LEVEL +from ..user.user_roles import ADMIN_ROLE_LEVEL +from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL from ..user.user_roles import GUEST_ROLE_LEVEL from ..user.user_service import UserService from .request import Change diff --git a/packages/syft/src/syft/store/document_store.py b/packages/syft/src/syft/store/document_store.py index 811b97f0a3f..322f4ff5bdd 100644 --- a/packages/syft/src/syft/store/document_store.py +++ b/packages/syft/src/syft/store/document_store.py @@ -3,7 +3,6 @@ # stdlib from collections.abc import Callable -import threading import types import typing from typing import Any @@ -641,7 +640,6 @@ def set( add_storage_permission: bool = True, ignore_duplicates: bool = False, ) -> Result[BaseStash.object_type, str]: - res = self.partition.set( credentials=credentials, obj=obj, From d22636a9b5d1d7a6a0004e40ff85fbd0df1177c5 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Fri, 21 Jun 2024 13:16:01 +0200 Subject: [PATCH 247/313] cleanup --- .../bigquery/05-autoapprove-ratelimiter.ipynb | 553 ------------------ .../src/syft/service/action/action_service.py | 3 + packages/syft/src/syft/service/api/api.py | 5 +- .../syft/src/syft/service/policy/policy.py | 8 +- .../syft/src/syft/service/request/request.py | 8 +- .../syft/service/request/request_service.py | 3 +- .../src/syft/service/user/user_service.py | 12 - 7 files changed, 21 insertions(+), 571 deletions(-) delete mode 100644 notebooks/scenarios/bigquery/05-autoapprove-ratelimiter.ipynb diff --git a/notebooks/scenarios/bigquery/05-autoapprove-ratelimiter.ipynb b/notebooks/scenarios/bigquery/05-autoapprove-ratelimiter.ipynb deleted file mode 100644 index b6510e69cb1..00000000000 --- a/notebooks/scenarios/bigquery/05-autoapprove-ratelimiter.ipynb +++ /dev/null @@ -1,553 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "d23b4d75-94b3-4dc8-8741-c3fed91747c5", - "metadata": {}, - "outputs": [], - "source": [ - "project_id = \"\" # add your bigquery project_id\n", - "database = \"test_1gb\"\n", - "table = \"accounts\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f72e70bd-1aa4-45bc-b142-5a8c36756eb5", - "metadata": {}, - "outputs": [], - "source": [ - "query = f\"SELECT * FROM 
{project_id}.{database}.{table} limit 10\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5118c9cc-e3fc-4768-a646-5c507f39c733", - "metadata": {}, - "outputs": [], - "source": [ - "# !uv pip install db-dtypes\n", - "# !uv pip install google-cloud-bigquery==3.24.0" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "57c97872-0ebd-4ece-8c3e-88d0a27bfce8", - "metadata": {}, - "outputs": [], - "source": [ - "SYFT_VERSION = \">=0.8.7.b0,<0.9\"\n", - "package_string = f'\"syft{SYFT_VERSION}\"'\n", - "# %pip install {package_string} -q" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5313ef79-c7a0-4f5c-9a61-f2029d060c04", - "metadata": {}, - "outputs": [], - "source": [ - "# syft absolute\n", - "import syft as sy" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d736170-3d96-47ca-921c-c97b6f3eca60", - "metadata": {}, - "outputs": [], - "source": [ - "node_name = \"bigquery-high-side\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9ec3c500-0ec1-4701-ae8a-b9509638b9ae", - "metadata": {}, - "outputs": [], - "source": [ - "node = sy.orchestra.launch(\n", - " name=node_name,\n", - " dev_mode=True,\n", - " create_producer=True,\n", - " n_consumers=1,\n", - " reset=True,\n", - " port=\"auto\",\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2ba848b8-cbda-4686-8ecd-392702fe504f", - "metadata": {}, - "outputs": [], - "source": [ - "high_client = node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6b4f7f3d-554c-4767-87c1-9650103f6014", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", - "import json\n", - "\n", - "SERVICE_ACCOUNT = json.loads(open(\"service_account.json\").read())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1701adfd-c570-4ebd-b75e-853fb3f665af", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.api_endpoint_method(\n", - " settings={\"SERVICE_ACCOUNT\": SERVICE_ACCOUNT},\n", - ")\n", - "def query_endpoint(context, sql: str) -> str:\n", - " # syft absolute\n", - " import syft as sy\n", - "\n", - " result = None\n", - " try:\n", - " # build up the state object so we can track a users sql submissions\n", - " # stdlib\n", - " from datetime import datetime\n", - "\n", - " query_time = datetime.now()\n", - "\n", - " user_email = context.user.email\n", - " submissions = context.state.get(\"submissions\", {})\n", - " if user_email not in submissions:\n", - " submissions[user_email] = {\"queries\": {}, \"results\": {}}\n", - "\n", - " query_str = sql.strip()\n", - " # exit early\n", - " if query_str in submissions[user_email][\"queries\"]:\n", - " # they have already submitted this query so just return the existing uid\n", - " result_ptr_id = submissions[user_email][\"queries\"][query_str]\n", - " return (\n", - " \"Query submitted for approval. 
\"\n", - " + f'Call client.bigquery.get_result(uid=\"{result_ptr_id}\") '\n", - " + 'to get your result.\"'\n", - " )\n", - "\n", - " # do the query\n", - " # third party\n", - " from google.cloud import bigquery\n", - " from google.oauth2 import service_account\n", - "\n", - " credentials = service_account.Credentials.from_service_account_info(\n", - " context.settings[\"SERVICE_ACCOUNT\"]\n", - " )\n", - " scoped_credentials = credentials.with_scopes(\n", - " [\"https://www.googleapis.com/auth/bigquery\"]\n", - " )\n", - "\n", - " client = bigquery.Client(\n", - " credentials=scoped_credentials,\n", - " location=\"us-west1\",\n", - " )\n", - "\n", - " query_job = client.query(sql)\n", - " # convert to pandas dataframe\n", - " result = query_job.result().to_dataframe()\n", - " except Exception:\n", - " result = sy.SyftError(\n", - " message=f\"There was an error running {sql}. Please contact an admin.\"\n", - " )\n", - "\n", - " try:\n", - " # upload to blob storage\n", - " result_obj = sy.ActionObject.from_obj(result)\n", - "\n", - " def to_blobstorage(action_obj, client):\n", - " action_obj.syft_node_location = client.id\n", - " action_obj.syft_client_verify_key = client.verify_key\n", - " action_obj._save_to_blob_storage()\n", - " action_obj.syft_action_data_cache = action_obj.as_empty_data()\n", - " action_obj.syft_action_data_repr_ = str(action_obj.syft_action_data_type)\n", - " action_obj.syft_action_data_str_ = str(action_obj.syft_action_data_type)\n", - " action_obj_ptr = client.api.services.action.set(\n", - " action_obj, add_storage_permission=True\n", - " )\n", - " return action_obj_ptr\n", - "\n", - " result_ptr = to_blobstorage(result_obj, context.admin_client)\n", - "\n", - " # store time and uid of result in state and return to user\n", - " result_ptr_str = str(result_ptr.id).lower()\n", - " submissions[user_email][\"queries\"][query_str] = (\n", - " result_ptr_str # for this function\n", - " )\n", - " submissions[user_email][\"results\"][result_ptr_str] = (\n", - " query_time # for the results function\n", - " )\n", - " context.state[\"submissions\"] = submissions\n", - " return (\n", - " \"Query submitted for approval. \"\n", - " + f'Call client.bigquery.get_result(uid=\"{str(result_ptr.id)}\") '\n", - " + 'to get your result.\"'\n", - " )\n", - " except Exception:\n", - " # unable to generate the action object\n", - " return sy.SyftError(\n", - " message=f\"There was an error running {sql}. 
Please contact an admin.\"\n", - " )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "69defe0e-cfca-4f41-a62c-1df1a2268e4d", - "metadata": {}, - "outputs": [], - "source": [ - "query_endpoint.view_access" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4ffd4098-8b0c-4123-b526-e8010306e622", - "metadata": {}, - "outputs": [], - "source": [ - "query_endpoint.view_access = False\n", - "query_endpoint.view_access" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f339e26b-5186-434e-bdf5-002af0c44f48", - "metadata": {}, - "outputs": [], - "source": [ - "new_endpoint = sy.TwinAPIEndpoint(\n", - " path=\"bigquery.query\",\n", - " mock_function=query_endpoint,\n", - " private_function=query_endpoint,\n", - " endpoint_timeout=60,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38c1858e-7262-4665-9366-40a6c4dab142", - "metadata": {}, - "outputs": [], - "source": [ - "high_client.api.services.api.delete(endpoint_path=\"bigquery.query\")\n", - "response = high_client.api.services.api.add(endpoint=new_endpoint)\n", - "response" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d01e4b1f-339a-4067-9320-5e530bf7e4ed", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.api_endpoint(\n", - " path=\"bigquery.get_result\", settings={\"approve_seconds\": 60}, endpoint_timeout=60\n", - ")\n", - "def get_result(context, uid: str) -> str:\n", - " # syft absolute\n", - " import syft as sy\n", - "\n", - " try:\n", - " # check of the user and uid are in the state\n", - " # stdlib\n", - " from datetime import datetime\n", - " from datetime import timedelta\n", - "\n", - " user_email = context.user.email\n", - "\n", - " # get the submissions from the other endpoint state\n", - " endpoint = context.admin_client.api.services.api.get(api_path=\"bigquery.query\")\n", - " submissions = {}\n", - " if hasattr(endpoint, \"mock_function\"):\n", - " submissions_obj = endpoint.mock_function.state\n", - " submissions = submissions_obj.get(\"submissions\", {})\n", - "\n", - " if user_email not in submissions:\n", - " submissions[user_email] = {\"queries\": {}, \"results\": {}}\n", - "\n", - " uid_str = uid.strip().lower()\n", - " if uid_str not in submissions[user_email][\"results\"]:\n", - " # no uid for this user\n", - " return f'There is no result matching {uid}. Please contact an admin.\"'\n", - " request_time = submissions[user_email][\"results\"][uid_str]\n", - " seconds = int(context.settings[\"approve_seconds\"])\n", - "\n", - " delta = timedelta(seconds=seconds)\n", - " result_ready = request_time + delta\n", - " if datetime.now() < result_ready:\n", - " # query not ready\n", - " return sy.SyftNotReady(\n", - " message=f\"The request to run the query for {uid} is pending. Try again later.\"\n", - " )\n", - " # fall through\n", - " except Exception:\n", - " return sy.SyftError(\n", - " message=f\"There was an error fetching {uid}. Please contact an admin.\"\n", - " )\n", - "\n", - " try:\n", - " # go get the data and return it\n", - " id = sy.UID(uid)\n", - " action_obj = context.admin_client.api.services.action.get(uid=id)\n", - " return action_obj\n", - " except Exception:\n", - " return sy.SyftError(\n", - " message=f\"There was an error fetching {uid}. 
Please contact an admin.\"\n", - " )\n", - "\n", - "\n", - "high_client.api.services.api.delete(endpoint_path=\"bigquery.get_result\")\n", - "response = high_client.api.services.api.add(endpoint=get_result)\n", - "response" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "030fd15a-edc7-41e4-b59f-463e8be598bc", - "metadata": {}, - "outputs": [], - "source": [ - "high_client.api.services.api" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16c9f2f0-a988-416d-b52a-628cf27f8bff", - "metadata": {}, - "outputs": [], - "source": [ - "high_client.register(\n", - " name=\"Jane Doe\",\n", - " email=\"jane@caltech.edu\",\n", - " password=\"abc123\",\n", - " password_verify=\"abc123\",\n", - " institution=\"Caltech\",\n", - " website=\"https://www.caltech.edu/\",\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7c20a33e-5784-4f10-a08d-22293bc939bc", - "metadata": {}, - "outputs": [], - "source": [ - "jane_client = high_client.login_as(email=\"jane@caltech.edu\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9f752816-8347-4ca0-9dbe-1e336b593eca", - "metadata": {}, - "outputs": [], - "source": [ - "res = jane_client.api.bigquery.query(sql=query)\n", - "res" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0c66df31-6a98-442e-847f-b36e304d050f", - "metadata": {}, - "outputs": [], - "source": [ - "# stdlib\n", - "import re\n", - "\n", - "\n", - "def extract_uid(input_string):\n", - " match = re.search(r'uid=\"([^\"]+)\"', input_string)\n", - " if match:\n", - " return match.group(1)\n", - " else:\n", - " return None\n", - "\n", - "\n", - "uid = extract_uid(str(res))\n", - "uid" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a36ecb92-2570-4151-a2dc-404d24dbabee", - "metadata": {}, - "outputs": [], - "source": [ - "res = jane_client.api.bigquery.get_result(uid=uid)\n", - "res" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3f0f4f5e-40f2-47af-8ec2-a08932beaf94", - "metadata": {}, - "outputs": [], - "source": [ - "res = jane_client.api.bigquery.get_result(uid=uid)\n", - "res" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c4a919a9-3c6a-4d84-9d63-d95888c2b920", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.api_endpoint_method()\n", - "def set_endpoint_state(context, endpoint_path: str, state: dict):\n", - " # syft absolute\n", - " import syft as sy\n", - "\n", - " if hasattr(state, \"syft_action_data\"):\n", - " state = state.syft_action_data\n", - " if not isinstance(state, dict):\n", - " return sy.SyftError(message=f\"state is {type(state)}, must be dict\")\n", - " result = context.admin_client.api.services.api.set_state(\n", - " api_path=endpoint_path, state=state, both=True\n", - " )\n", - " return result\n", - "\n", - "\n", - "@sy.api_endpoint_method()\n", - "def empty_mock(context, endpoint_path: str, state: dict):\n", - " return \"not allowed\"\n", - "\n", - "\n", - "new_endpoint = sy.TwinAPIEndpoint(\n", - " path=\"state.set\",\n", - " mock_function=empty_mock,\n", - " private_function=set_endpoint_state,\n", - ")\n", - "\n", - "high_client.api.services.api.delete(endpoint_path=\"state.set\")\n", - "high_client.api.services.api.add(endpoint=new_endpoint)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d2d191f8-7c64-4a01-9e40-be77952321d1", - "metadata": {}, - "outputs": [], - "source": [ - "# get the state\n", - "current_state = high_client.api.services.api.get(\n", - 
" api_path=\"bigquery.query\"\n", - ").mock_function.state\n", - "current_state" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "cadc7de4-5e2b-437c-9f5b-b944638e45a3", - "metadata": {}, - "outputs": [], - "source": [ - "# reset the state\n", - "high_client.api.services.state.set(endpoint_path=\"bigquery.query\", state={})" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "de988b2d-0d3d-4eec-b981-396698f8acb6", - "metadata": {}, - "outputs": [], - "source": [ - "high_client.api.services.api.get(api_path=\"bigquery.query\").mock_function.state" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2e91fa45-bc2f-4312-83ac-e308e41ba58d", - "metadata": {}, - "outputs": [], - "source": [ - "current_state" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0c227eef-7b7e-454f-9285-0cd69309e871", - "metadata": {}, - "outputs": [], - "source": [ - "# restore it\n", - "high_client.api.services.state.set(endpoint_path=\"bigquery.query\", state=current_state)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "284f0ee0-801c-4af4-b8bf-eb99246fb0d0", - "metadata": {}, - "outputs": [], - "source": [ - "high_client.api.services.api.get(api_path=\"bigquery.query\").mock_function.state" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1b84e0ef-b7ac-43c1-8765-3e02888156c9", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/packages/syft/src/syft/service/action/action_service.py b/packages/syft/src/syft/service/action/action_service.py index 6b812a92f34..59b8bd500bc 100644 --- a/packages/syft/src/syft/service/action/action_service.py +++ b/packages/syft/src/syft/service/action/action_service.py @@ -7,6 +7,8 @@ from result import Err from result import Ok from result import Result + +# syft absolute from syft.service.action.action_endpoint import CustomEndpointActionObject from syft.service.api.api import TwinAPIEndpoint @@ -1049,6 +1051,7 @@ def filter_twin_kwargs( elif isinstance(v, str | int | float | dict | CustomEndpointActionObject) and allow_python_types: filtered[k] = v else: + # third party import ipdb ipdb.set_trace() raise ValueError( diff --git a/packages/syft/src/syft/service/api/api.py b/packages/syft/src/syft/service/api/api.py index 0bd2c7fd579..99940c31c39 100644 --- a/packages/syft/src/syft/service/api/api.py +++ b/packages/syft/src/syft/service/api/api.py @@ -17,10 +17,13 @@ from result import Err from result import Ok from result import Result + +# syft absolute from syft.service.user.user_service import UserService # relative -from ...abstract_node import AbstractNode, NodeSideType +from ...abstract_node import AbstractNode +from ...abstract_node import NodeSideType from ...client.client import SyftClient from ...serde.serializable import serializable from ...serde.signature import signature_remove_context diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index caa6b1da424..04dae0e4fe0 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ 
b/packages/syft/src/syft/service/policy/policy.py @@ -13,16 +13,20 @@ from io import StringIO import sys import types -from typing import Any, Type +from typing import Any from typing import ClassVar +from typing import Type # third party from RestrictedPython import compile_restricted -from pydantic import field_validator, model_validator +from pydantic import field_validator +from pydantic import model_validator import requests from result import Err from result import Ok from result import Result + +# syft absolute from syft.service.action.action_endpoint import CustomEndpointActionObject # relative diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 78630b561b1..de5f5f3e191 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -10,9 +10,11 @@ from result import Err from result import Ok from result import Result -from syft.types.syft_migration import migrate from typing_extensions import Self +# syft absolute +from syft.types.syft_migration import migrate + # relative from ...abstract_node import NodeSideType from ...client.api import APIRegistry @@ -29,9 +31,11 @@ from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...types.syft_object import SyftObject from ...types.syncable_object import SyncableSyftObject -from ...types.transforms import TransformContext, drop, make_set_default +from ...types.transforms import TransformContext from ...types.transforms import add_node_uid_for_key +from ...types.transforms import drop from ...types.transforms import generate_id +from ...types.transforms import make_set_default from ...types.transforms import transform from ...types.twin_object import TwinObject from ...types.uid import LineageID diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index 7edadc9a888..9d0bead7089 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -24,7 +24,8 @@ from ..service import TYPE_TO_SERVICE from ..service import service_method from ..user.user import UserView -from ..user.user_roles import ADMIN_ROLE_LEVEL, DATA_SCIENTIST_ROLE_LEVEL +from ..user.user_roles import ADMIN_ROLE_LEVEL +from ..user.user_roles import DATA_SCIENTIST_ROLE_LEVEL from ..user.user_roles import GUEST_ROLE_LEVEL from ..user.user_service import UserService from .request import Change diff --git a/packages/syft/src/syft/service/user/user_service.py b/packages/syft/src/syft/service/user/user_service.py index fa586353fc4..25d85bccd9f 100644 --- a/packages/syft/src/syft/service/user/user_service.py +++ b/packages/syft/src/syft/service/user/user_service.py @@ -377,18 +377,6 @@ def get_target_object( else: return user - @service_method( - path="user.key_for_email", name="key_for_email", roles=DATA_OWNER_ROLE_LEVEL - ) - def key_for_email( - self, context: AuthedServiceContext, email: str - ) -> UserPrivateKey | SyftError: - result = self.stash.get_by_email(credentials=context.credentials, email=email) - if result.is_ok(): - user = result.ok() - return user.to(UserPrivateKey) - return SyftError(message=str(result.err())) - @service_method(path="user.delete", name="delete", roles=GUEST_ROLE_LEVEL) def delete(self, context: AuthedServiceContext, uid: UID) -> bool | SyftError: # third party From 0a8f22af5f14783f5ae826c6fab480b7c05e0945 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 
21 Jun 2024 13:26:10 +0200 Subject: [PATCH 248/313] fix: twin api can be updated through set Syncing uses `set` method for various services to sync items. --- .../syft/src/syft/service/api/api_service.py | 17 ++++++----- tests/integration/local/twin_api_sync_test.py | 29 +++++++++++++++++-- 2 files changed, 37 insertions(+), 9 deletions(-) diff --git a/packages/syft/src/syft/service/api/api_service.py b/packages/syft/src/syft/service/api/api_service.py index 8ab77ad2667..3dcd1ab20ac 100644 --- a/packages/syft/src/syft/service/api/api_service.py +++ b/packages/syft/src/syft/service/api/api_service.py @@ -69,15 +69,18 @@ def set( except ValueError as e: return SyftError(message=str(e)) - endpoint_exists = self.stash.path_exists(context.credentials, new_endpoint.path) + if isinstance(endpoint, CreateTwinAPIEndpoint): + endpoint_exists = self.stash.path_exists( + context.credentials, new_endpoint.path + ) - if endpoint_exists.is_err(): - return SyftError(message=endpoint_exists.err()) + if endpoint_exists.is_err(): + return SyftError(message=endpoint_exists.err()) - if endpoint_exists.is_ok() and endpoint_exists.ok(): - return SyftError( - message="An API endpoint already exists at the given path." - ) + if endpoint_exists.is_ok() and endpoint_exists.ok(): + return SyftError( + message="An API endpoint already exists at the given path." + ) result = self.stash.upsert(context.credentials, endpoint=new_endpoint) if result.is_err(): diff --git a/tests/integration/local/twin_api_sync_test.py b/tests/integration/local/twin_api_sync_test.py index c6b1a152789..b30b4e50382 100644 --- a/tests/integration/local/twin_api_sync_test.py +++ b/tests/integration/local/twin_api_sync_test.py @@ -121,12 +121,37 @@ def compute(query): private_res, SyftError ), "Should not be able to access private function on low side." - # updating twin api endpoint works - high_client.custom_api.update(endpoint_path="testapi.query", endpoint_timeout=60) + # verify updating twin api endpoint works + + timeout_before = ( + full_low_worker.python_node.get_service("apiservice") + .stash.get_all( + credentials=full_low_worker.client.credentials, has_permission=True + ) + .ok()[0] + .endpoint_timeout + ) + expected_timeout_after = timeout_before + 1 + + high_client.custom_api.update( + endpoint_path="testapi.query", endpoint_timeout=expected_timeout_after + ) widget = sy.sync(from_client=high_client, to_client=low_client) result = widget[0].click_sync() assert result, result + timeout_after = ( + full_low_worker.python_node.get_service("apiservice") + .stash.get_all( + credentials=full_low_worker.client.credentials, has_permission=True + ) + .ok()[0] + .endpoint_timeout + ) + assert ( + timeout_after == expected_timeout_after + ), "Timeout should be updated on low side." 
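The fix above narrows the duplicate-path check so it only rejects `CreateTwinAPIEndpoint` submissions: an existing `TwinAPIEndpoint` re-submitted through `set` during sync now falls through to `upsert` instead of erroring. A minimal, self-contained sketch of that control flow, using simplified stand-ins for Syft's classes rather than the actual implementation:

```python
from dataclasses import dataclass, field


@dataclass
class TwinAPIEndpoint:
    path: str


@dataclass
class CreateTwinAPIEndpoint(TwinAPIEndpoint):
    """A fresh endpoint submission, as opposed to a synced existing one."""


@dataclass
class Stash:
    rows: dict = field(default_factory=dict)

    def path_exists(self, path: str) -> bool:
        return path in self.rows

    def upsert(self, endpoint: TwinAPIEndpoint) -> None:
        self.rows[endpoint.path] = endpoint


def set_endpoint(stash: Stash, endpoint: TwinAPIEndpoint) -> str:
    # Only a *create* submission is rejected on a duplicate path; a plain
    # TwinAPIEndpoint arriving via sync falls through and upserts in place.
    if isinstance(endpoint, CreateTwinAPIEndpoint) and stash.path_exists(endpoint.path):
        return "error: an API endpoint already exists at the given path"
    stash.upsert(endpoint)
    return "ok"


stash = Stash()
assert set_endpoint(stash, CreateTwinAPIEndpoint(path="testapi.query")) == "ok"
# A second *create* at the same path is rejected...
assert set_endpoint(stash, CreateTwinAPIEndpoint(path="testapi.query")).startswith("error")
# ...but a synced TwinAPIEndpoint at that path updates in place, which is
# the behavior the integration test above verifies via endpoint_timeout.
assert set_endpoint(stash, TwinAPIEndpoint(path="testapi.query")) == "ok"
```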
From a95efc3d4be4bcebdd01f551fbb2e49dc9673094 Mon Sep 17 00:00:00 2001
From: Thiago Costa Porto
Date: Fri, 21 Jun 2024 13:39:09 +0200
Subject: [PATCH 249/313] feat: move sanitization fn to utils

---
 packages/syft/src/syft/service/response.py    |  6 ++---
 .../syft/util/notebook_ui/components/sync.py  |  1 -
 .../components/tabulator_template.py          |  9 +++----
 packages/syft/src/syft/util/patch_ipython.py  | 12 ++++-----
 packages/syft/src/syft/util/table.py          |  6 ++---
 packages/syft/src/syft/util/util.py           | 27 +++++++++++++++++++
 6 files changed, 41 insertions(+), 20 deletions(-)

diff --git a/packages/syft/src/syft/service/response.py b/packages/syft/src/syft/service/response.py
index 12070a42b54..adce4b8af73 100644
--- a/packages/syft/src/syft/service/response.py
+++ b/packages/syft/src/syft/service/response.py
@@ -4,12 +4,12 @@
 from typing import Any
 
 # third party
-import nh3
 from result import Err
 
 # relative
 from ..serde.serializable import serializable
 from ..types.base import SyftBaseModel
+from ..util.util import sanitize_html
 
 
 class SyftResponseMessage(SyftBaseModel):
@@ -45,7 +45,7 @@ def _repr_html_(self) -> str:
         return (
             f'
' f"{type(self).__name__}: " f'
'
-            f"{nh3.clean(self.message, clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}})}

" + f"{sanitize_html(self.message)}
" ) @@ -108,7 +108,7 @@ def _repr_html_class_(self) -> str: def _repr_html_(self) -> str: return ( f'
' - + f"{type(self).__name__}: {nh3.clean(self.args, clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}})}

" + + f"{type(self).__name__}: {sanitize_html(self.args)}
" ) @staticmethod diff --git a/packages/syft/src/syft/util/notebook_ui/components/sync.py b/packages/syft/src/syft/util/notebook_ui/components/sync.py index 9e8877ed70c..4fdd0adf1b3 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/sync.py +++ b/packages/syft/src/syft/util/notebook_ui/components/sync.py @@ -3,7 +3,6 @@ from typing import Any # third party -import nh3 from pydantic import model_validator # relative diff --git a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py index 9199aa0acbc..69e0fdbff0f 100644 --- a/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py +++ b/packages/syft/src/syft/util/notebook_ui/components/tabulator_template.py @@ -8,13 +8,13 @@ from IPython.display import display import jinja2 from loguru import logger -import nh3 # relative from ...assets import load_css from ...assets import load_js from ...table import TABLE_INDEX_KEY from ...table import prepare_table_data +from ...util import sanitize_html from ..icons import Icon DEFAULT_ID_WIDTH = 110 @@ -69,8 +69,7 @@ def format_dict(data: Any) -> str: return data if set(data.keys()) != {"type", "value"}: - return nh3.clean(str(data), clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) - + return sanitize_html(str(data)) if "badge" in data["type"]: return Badge(value=data["value"], badge_class=data["type"]).to_html() elif "label" in data["type"]: @@ -78,7 +77,7 @@ def format_dict(data: Any) -> str: if "clipboard" in data["type"]: return CopyButton(copy_text=data["value"]).to_html() - return nh3.clean(str(data), clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) + return sanitize_html(str(data)) def format_table_data(table_data: list[dict[str, Any]]) -> list[dict[str, str]]: @@ -87,7 +86,7 @@ def format_table_data(table_data: list[dict[str, Any]]) -> list[dict[str, str]]: row_formatted: dict[str, str] = {} for k, v in row.items(): if isinstance(v, str): - row_formatted[k] = nh3.clean(v.replace("\n", "
"), clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) + row_formatted[k] = sanitize_html(v.replace("\n", "
")) continue v_formatted = format_dict(v) row_formatted[k] = v_formatted diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 79d050ba955..5e47ac5ef98 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -6,6 +6,7 @@ # relative from ..types.dicttuple import DictTuple from ..types.syft_object import SyftObject +from .util import sanitize_html def _patch_ipython_sanitization() -> None: @@ -22,18 +23,15 @@ def _patch_ipython_sanitization() -> None: # stdlib from importlib import resources - # third party - import nh3 - # relative from .assets import load_css from .assets import load_js + from .notebook_ui.components.sync import ALERT_CSS + from .notebook_ui.components.sync import COPY_CSS from .notebook_ui.styles import CSS_CODE from .notebook_ui.styles import FONT_CSS from .notebook_ui.styles import ITABLES_CSS from .notebook_ui.styles import JS_DOWNLOAD_FONTS - from .notebook_ui.components.sync import ALERT_CSS - from .notebook_ui.components.sync import COPY_CSS tabulator_js = load_js("tabulator.min.js") tabulator_js = tabulator_js.replace( @@ -79,7 +77,7 @@ def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: template = "\n".join(matching_template) sanitized_str = escaped_template.sub("", html_str) sanitized_str = escaped_js_css.sub("", sanitized_str) - sanitized_str = nh3.clean(sanitized_str, clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) + sanitized_str = sanitize_html(sanitized_str) return f"{css_reinsert} {sanitized_str} {template}" return None @@ -87,7 +85,7 @@ def display_sanitized_md(obj: SyftObject) -> str | None: if callable(getattr(obj, "_repr_markdown_", None)): md = obj._repr_markdown_() if md is not None: - return nh3.clean(md, clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}}) + return sanitize_html(md) return None ip.display_formatter.formatters["text/html"].for_type( diff --git a/packages/syft/src/syft/util/table.py b/packages/syft/src/syft/util/table.py index 60510161a5c..296596f33a9 100644 --- a/packages/syft/src/syft/util/table.py +++ b/packages/syft/src/syft/util/table.py @@ -6,11 +6,9 @@ import re from typing import Any -# third party -import nh3 - # relative from .util import full_name_with_qualname +from .util import sanitize_html TABLE_INDEX_KEY = "_table_repr_index" @@ -134,7 +132,7 @@ def _create_table_rows( except Exception as e: print(e) value = None - cols[field].append(nh3.clean(str(value), clean_content_tags={"script", "style"}, attributes={"*": {"style", "class"}})) + cols[field].append(sanitize_html(str(value))) col_lengths = {len(cols[col]) for col in cols.keys()} if len(col_lengths) != 1: diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index b0affa2b1a0..59f7974ae9c 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -29,12 +29,14 @@ import types from types import ModuleType from typing import Any +from copy import deepcopy # third party from IPython.display import display from forbiddenfruit import curse from nacl.signing import SigningKey from nacl.signing import VerifyKey +import nh3 import requests # relative @@ -919,3 +921,28 @@ def get_queue_address(port: int) -> str: def get_dev_mode() -> bool: return str_to_bool(os.getenv("DEV_MODE", "False")) + + +def sanitize_html(html: str) -> str: + policy = { + 'tags': ['svg', 'strong', 'rect', 'path', 'circle'], + 'attributes': { + '*': 
{'class', 'style'}, + 'svg': {'class', 'style', 'xmlns', 'width', 'height', 'viewBox', 'fill', 'stroke', 'stroke-width'}, + 'path': {'d', 'fill', 'stroke', 'stroke-width'}, + 'rect': {'x', 'y', 'width', 'height', 'fill', 'stroke', 'stroke-width'}, + 'circle': {'cx', 'cy', 'r', 'fill', 'stroke', 'stroke-width'}, + }, + 'remove': {'script', 'style'} + } + + tags = nh3.ALLOWED_TAGS + for tag in policy['tags']: + tags.add(tag) + + attributes = deepcopy(nh3.ALLOWED_ATTRIBUTES) + attributes = {**attributes, **policy['attributes']} + + return nh3.clean( + html, tags=tags, clean_content_tags=policy['remove'], attributes=attributes, + ) From c4f167124c4d25b0bcb7713ac5c440aa4b4c132f Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 13:57:38 +0200 Subject: [PATCH 250/313] fix: broken svg, lint styles --- packages/syft/src/syft/assets/css/style.css | 11 +++++++---- packages/syft/src/syft/assets/svg/copy.svg | 6 ++---- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/packages/syft/src/syft/assets/css/style.css b/packages/syft/src/syft/assets/css/style.css index 113a9908055..b4e624170b0 100644 --- a/packages/syft/src/syft/assets/css/style.css +++ b/packages/syft/src/syft/assets/css/style.css @@ -567,7 +567,11 @@ body { background-color: #c2def0; } -.syft-user_code, .syft-project, .syft-project-create, .syft-dataset, .syft-syncstate { +.syft-user_code, +.syft-project, +.syft-project-create, +.syft-dataset, +.syft-syncstate { color: var(--surface-color); } @@ -575,10 +579,9 @@ body { .syft-dataset p, .syft-syncstate h3, .syft-syncstate p { - font-family: 'Open Sans'; + font-family: "Open Sans"; } .diff-container { - border: 0.5px solid #B4B0BF; + border: 0.5px solid #b4b0bf; } - diff --git a/packages/syft/src/syft/assets/svg/copy.svg b/packages/syft/src/syft/assets/svg/copy.svg index aadd5116ebb..9e43a5b27f2 100644 --- a/packages/syft/src/syft/assets/svg/copy.svg +++ b/packages/syft/src/syft/assets/svg/copy.svg @@ -1,5 +1,3 @@ - - \ No newline at end of file + + From a1fb642e2ceff33c80dd63f095930c32938b19b0 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 14:00:13 +0200 Subject: [PATCH 251/313] lint --- packages/syft/src/syft/util/util.py | 37 +++++++++++++++++++---------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 59f7974ae9c..979c6673021 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -7,6 +7,7 @@ from concurrent.futures import ProcessPoolExecutor from concurrent.futures import ThreadPoolExecutor from contextlib import contextmanager +from copy import deepcopy import functools import hashlib from itertools import repeat @@ -29,7 +30,6 @@ import types from types import ModuleType from typing import Any -from copy import deepcopy # third party from IPython.display import display @@ -925,24 +925,37 @@ def get_dev_mode() -> bool: def sanitize_html(html: str) -> str: policy = { - 'tags': ['svg', 'strong', 'rect', 'path', 'circle'], - 'attributes': { - '*': {'class', 'style'}, - 'svg': {'class', 'style', 'xmlns', 'width', 'height', 'viewBox', 'fill', 'stroke', 'stroke-width'}, - 'path': {'d', 'fill', 'stroke', 'stroke-width'}, - 'rect': {'x', 'y', 'width', 'height', 'fill', 'stroke', 'stroke-width'}, - 'circle': {'cx', 'cy', 'r', 'fill', 'stroke', 'stroke-width'}, + "tags": ["svg", "strong", "rect", "path", "circle"], + "attributes": { + "*": {"class", "style"}, + "svg": { + "class", + "style", + "xmlns", + 
"width", + "height", + "viewBox", + "fill", + "stroke", + "stroke-width", + }, + "path": {"d", "fill", "stroke", "stroke-width"}, + "rect": {"x", "y", "width", "height", "fill", "stroke", "stroke-width"}, + "circle": {"cx", "cy", "r", "fill", "stroke", "stroke-width"}, }, - 'remove': {'script', 'style'} + "remove": {"script", "style"}, } tags = nh3.ALLOWED_TAGS - for tag in policy['tags']: + for tag in policy["tags"]: tags.add(tag) attributes = deepcopy(nh3.ALLOWED_ATTRIBUTES) - attributes = {**attributes, **policy['attributes']} + attributes = {**attributes, **policy["attributes"]} return nh3.clean( - html, tags=tags, clean_content_tags=policy['remove'], attributes=attributes, + html, + tags=tags, + clean_content_tags=policy["remove"], + attributes=attributes, ) From c78e40adafdc2796a2344e5699f78ae68080412b Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 14:16:01 +0200 Subject: [PATCH 252/313] fix(notebooks): replace make_base_laplace with make_laplace --- notebooks/api/0.8/01-submit-code.ipynb | 2 +- notebooks/api/0.8/06-multiple-code-requests.ipynb | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/notebooks/api/0.8/01-submit-code.ipynb b/notebooks/api/0.8/01-submit-code.ipynb index 761d1a96e7a..3d360c88b18 100644 --- a/notebooks/api/0.8/01-submit-code.ipynb +++ b/notebooks/api/0.8/01-submit-code.ipynb @@ -306,7 +306,7 @@ " dp.enable_features(\"contrib\")\n", "\n", " aggregate = 0.0\n", - " base_lap = dp.m.make_base_laplace(\n", + " base_lap = dp.m.make_laplace(\n", " dp.atom_domain(T=float),\n", " dp.absolute_distance(T=float),\n", " scale=5.0,\n", diff --git a/notebooks/api/0.8/06-multiple-code-requests.ipynb b/notebooks/api/0.8/06-multiple-code-requests.ipynb index 4be948cc00b..a52b5c6b38f 100644 --- a/notebooks/api/0.8/06-multiple-code-requests.ipynb +++ b/notebooks/api/0.8/06-multiple-code-requests.ipynb @@ -203,7 +203,7 @@ "\n", " # compute sum\n", " res = data.sum()\n", - " base_lap = dp.m.make_base_laplace(\n", + " base_lap = dp.m.make_laplace(\n", " dp.atom_domain(T=float),\n", " dp.absolute_distance(T=float),\n", " scale=10.0,\n", @@ -304,7 +304,7 @@ "\n", " # compute mean\n", " mean = data.mean()\n", - " base_lap = dp.m.make_base_laplace(\n", + " base_lap = dp.m.make_laplace(\n", " dp.atom_domain(T=float),\n", " dp.absolute_distance(T=float),\n", " scale=10.0,\n", From 6cc4507f7fd925a6a2985b2f13b7278619e3a0be Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 14:31:50 +0200 Subject: [PATCH 253/313] feat: add more styles to styles.css --- packages/syft/src/syft/assets/css/style.css | 33 ++++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/assets/css/style.css b/packages/syft/src/syft/assets/css/style.css index b4e624170b0..d5ebf822b62 100644 --- a/packages/syft/src/syft/assets/css/style.css +++ b/packages/syft/src/syft/assets/css/style.css @@ -570,13 +570,20 @@ body { .syft-user_code, .syft-project, .syft-project-create, +.syft-settings, .syft-dataset, -.syft-syncstate { +.syft-asset, +.syft-contributor, +.syft-request, +.syft-syncstate, +.job-info { color: var(--surface-color); } .syft-dataset h3, .syft-dataset p, +.syft-asset h3, +.syft-asset p, .syft-syncstate h3, .syft-syncstate p { font-family: "Open Sans"; @@ -585,3 +592,27 @@ body { .diff-container { border: 0.5px solid #b4b0bf; } + +.syft-container { + padding: 5px; + font-family: 'Open Sans'; +} + +.syft-alert-info { + color: #1F567A; + background-color: #C2DEF0; + border-radius: 4px; + padding: 5px; 
+  padding: 13px 10px
+}
+
+.syft-code-block {
+  background-color: #f7f7f7;
+  border: 1px solid #cfcfcf;
+  padding: 0px 2px;
+}
+
+.syft-space {
+  margin-top: 1em;
+}
+

From d0cd6fd71eb259f90b9cf6354f9e0d0f0aab5b97 Mon Sep 17 00:00:00 2001
From: Thiago Costa Porto
Date: Fri, 21 Jun 2024 14:42:26 +0200
Subject: [PATCH 254/313] lint

---
 packages/syft/src/syft/assets/css/style.css | 9 ++++-----
 packages/syft/src/syft/util/util.py         | 4 ++--
 2 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/packages/syft/src/syft/assets/css/style.css b/packages/syft/src/syft/assets/css/style.css
index d5ebf822b62..beece1fa2c0 100644
--- a/packages/syft/src/syft/assets/css/style.css
+++ b/packages/syft/src/syft/assets/css/style.css
@@ -595,15 +595,15 @@ body {
 
 .syft-container {
   padding: 5px;
-  font-family: 'Open Sans';
+  font-family: "Open Sans";
 }
 
 .syft-alert-info {
-  color: #1F567A;
-  background-color: #C2DEF0;
+  color: #1f567a;
+  background-color: #c2def0;
   border-radius: 4px;
   padding: 5px;
-  padding: 13px 10px
+  padding: 13px 10px;
 }
 
 .syft-code-block {
@@ -615,4 +615,3 @@ body {
 .syft-space {
   margin-top: 1em;
 }
-
diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py
index 979c6673021..0860eb9e5e4 100644
--- a/packages/syft/src/syft/util/util.py
+++ b/packages/syft/src/syft/util/util.py
@@ -950,8 +950,8 @@ def sanitize_html(html: str) -> str:
     for tag in policy["tags"]:
         tags.add(tag)
 
-    attributes = deepcopy(nh3.ALLOWED_ATTRIBUTES)
-    attributes = {**attributes, **policy["attributes"]}
+    _attributes = deepcopy(nh3.ALLOWED_ATTRIBUTES)
+    attributes = {**_attributes, **policy["attributes"]}  # type: ignore
 
     return nh3.clean(
         html,
From b94afc3627531f87c1662b3cee06eb9076575072 Mon Sep 17 00:00:00 2001
From: Koen van der Veen
Date: Fri, 21 Jun 2024 14:51:42 +0200
Subject: [PATCH 255/313] add error handling for wrong args

---
 packages/syft/src/syft/service/api/api.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/packages/syft/src/syft/service/api/api.py b/packages/syft/src/syft/service/api/api.py
index 6ef6ebbc7fc..cd46cf7bab9 100644
--- a/packages/syft/src/syft/service/api/api.py
+++ b/packages/syft/src/syft/service/api/api.py
@@ -551,7 +551,8 @@ def exec_code(
             return result
         except Exception as e:
             # If it's admin, return the error message.
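The remainder of this hunk, just below, widens the condition under which the raw exception text is returned to the caller: previously only admins (role value 128) saw it; now a `TypeError`, which usually means the caller passed wrong arguments to the endpoint, is surfaced to any caller. A rough sketch of the pattern, assuming an illustrative `Role` enum and fallback message (the real Syft context object and its wording are not shown in this patch):

```python
# Sketch only: Role and the fallback message are illustrative stand-ins,
# not the actual Syft service context or its exact wording.
from collections.abc import Callable
from enum import IntEnum
from typing import Any


class Role(IntEnum):
    GUEST = 1
    ADMIN = 128


def exec_endpoint(role: Role, func: Callable[..., Any], *args: Any) -> Any:
    try:
        return func(*args)
    except Exception as e:
        # Admins always get the raw error; TypeErrors (wrong call arguments)
        # describe the caller's own input, so they are safe to show anyone.
        if role == Role.ADMIN or isinstance(e, TypeError):
            return f"An error was raised during the execution of the API endpoint call: \n {e}"
        return "Endpoint execution failed, please contact an admin."


def query(limit: int) -> str:
    return "row," * limit


def boom() -> None:
    raise RuntimeError("internal detail")


assert exec_endpoint(Role.GUEST, query, 2) == "row,row,"
print(exec_endpoint(Role.GUEST, query))   # missing argument -> TypeError, surfaced
print(exec_endpoint(Role.GUEST, boom))    # hidden behind the generic message
print(exec_endpoint(Role.ADMIN, boom))    # admin sees the raw RuntimeError text
```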
- if context.role.value == 128: + # TODO: cleanup typeerrors + if context.role.value == 128 or isinstance(e, TypeError): return SyftError( message=f"An error was raised during the execution of the API endpoint call: \n {str(e)}" ) From b8172fd57bc3866cee2abe3b206bf866c368c8d7 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 21 Jun 2024 14:52:12 +0200 Subject: [PATCH 256/313] bump uv --- .github/workflows/cd-docs.yml | 2 +- .github/workflows/cd-post-release-tests.yml | 4 ++-- .github/workflows/cd-syft.yml | 4 ++-- .github/workflows/e2e-tests-notebook.yml | 2 +- .github/workflows/pr-tests-frontend.yml | 4 ++-- .github/workflows/pr-tests-linting.yml | 4 ++-- .github/workflows/pr-tests-stack.yml | 16 ++++++++-------- .github/workflows/pr-tests-syft.yml | 16 ++++++++-------- 8 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml index ff042e74017..de0288df04c 100644 --- a/.github/workflows/cd-docs.yml +++ b/.github/workflows/cd-docs.yml @@ -27,7 +27,7 @@ jobs: - name: Install tox run: | - pip install --upgrade pip uv==0.1.35 tox tox-uv==1.5.1 + pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 uv --version - name: Build the docs diff --git a/.github/workflows/cd-post-release-tests.yml b/.github/workflows/cd-post-release-tests.yml index 370469ea0bb..5ff48092b1e 100644 --- a/.github/workflows/cd-post-release-tests.yml +++ b/.github/workflows/cd-post-release-tests.yml @@ -132,7 +132,7 @@ jobs: - name: Install tox run: | - pip install --upgrade pip uv==0.1.35 tox tox-uv==1.5.1 + pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 - name: Run K8s tests env: @@ -193,7 +193,7 @@ jobs: - name: Install tox and uv run: | - pip install --upgrade pip uv==0.1.35 tox tox-uv==1.5.1 tox-current-env + pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 tox-current-env - name: Run unit tests run: | diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index 486196ecbdb..9895d28c5e7 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -133,7 +133,7 @@ jobs: - name: Install dependencies run: | - pip install --upgrade pip uv==0.1.35 bump2version tox tox-uv==1.5.1 + pip install --upgrade pip uv==0.2.13 bump2version tox tox-uv==1.9.0 uv --version - name: Get Release tag @@ -385,7 +385,7 @@ jobs: python-version: "3.12" - name: Install dependencies run: | - pip install --upgrade pip uv==0.1.35 tox tox-uv==1.5.1 setuptools wheel twine bump2version PyYAML + pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 setuptools wheel twine bump2version PyYAML uv --version - name: Bump the Version diff --git a/.github/workflows/e2e-tests-notebook.yml b/.github/workflows/e2e-tests-notebook.yml index 10c3eb84e2d..d2066e20fb6 100644 --- a/.github/workflows/e2e-tests-notebook.yml +++ b/.github/workflows/e2e-tests-notebook.yml @@ -63,7 +63,7 @@ jobs: - name: Install Deps run: | - pip install --upgrade pip uv==0.1.35 tox tox-uv==1.5.1 + pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 - name: Get pip cache dir id: pip-cache diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index ae5669e4dc9..47fbbd7c377 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -46,7 +46,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -72,7 +72,7 @@ jobs: - 
name: Install Tox if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Remove existing containers if: steps.changes.outputs.frontend == 'true' diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index fdb90728c2a..f8759a81f6d 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -29,7 +29,7 @@ jobs: - name: Install pip packages run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -49,7 +49,7 @@ jobs: - name: Install Tox run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index 34620e3fa80..a813474e70c 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -52,7 +52,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -74,7 +74,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Run syft backend base image building test if: steps.changes.outputs.stack == 'true' @@ -113,7 +113,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -135,7 +135,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Run Syft Integration Tests if: steps.changes.outputs.stack == 'true' @@ -200,7 +200,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -222,7 +222,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Install kubectl if: steps.changes.outputs.stack == 'true' @@ -353,7 +353,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -375,7 +375,7 @@ jobs: - name: Install tox if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Install kubectl if: steps.changes.outputs.stack == 'true' diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index 425e3468438..d3bb7bdaad7 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -65,7 +65,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -93,7 +93,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Run unit tests if: steps.changes.outputs.syft == 'true' @@ -160,7 
+160,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -182,7 +182,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Run notebook tests uses: nick-fields/retry@v3 @@ -242,7 +242,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -264,7 +264,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Docker Compose on Linux if: (steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true') && matrix.os == 'ubuntu-latest' @@ -343,7 +343,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==0.1.35 + pip install --upgrade pip uv==0.2.13 uv --version - name: Get pip cache dir @@ -365,7 +365,7 @@ jobs: - name: Install Dependencies if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade tox tox-uv==1.5.1 + pip install --upgrade tox tox-uv==1.9.0 - name: Scan for security issues if: steps.changes.outputs.syft == 'true' From 1d0f2fa7667447c8c9ad81701cbe406eef43cb14 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Fri, 21 Jun 2024 15:03:18 +0200 Subject: [PATCH 257/313] fix pip version --- .github/workflows/cd-docs.yml | 2 +- .github/workflows/cd-post-release-tests.yml | 4 ++-- .github/workflows/cd-syft-dev.yml | 2 +- .github/workflows/cd-syft.yml | 4 ++-- .github/workflows/cd-syftcli.yml | 4 ++-- .github/workflows/e2e-tests-notebook.yml | 2 +- .github/workflows/pr-tests-frontend.yml | 2 +- .github/workflows/pr-tests-helm-lint.yml | 2 +- .github/workflows/pr-tests-helm-upgrade.yml | 2 +- .github/workflows/pr-tests-linting.yml | 2 +- .github/workflows/pr-tests-stack.yml | 8 ++++---- .github/workflows/pr-tests-syft.yml | 8 ++++---- 12 files changed, 21 insertions(+), 21 deletions(-) diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml index de0288df04c..0e741baacc6 100644 --- a/.github/workflows/cd-docs.yml +++ b/.github/workflows/cd-docs.yml @@ -27,7 +27,7 @@ jobs: - name: Install tox run: | - pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 + pip install --upgrade pip==24.0 uv==0.2.13 tox tox-uv==1.9.0 uv --version - name: Build the docs diff --git a/.github/workflows/cd-post-release-tests.yml b/.github/workflows/cd-post-release-tests.yml index 5ff48092b1e..adaafb47f9a 100644 --- a/.github/workflows/cd-post-release-tests.yml +++ b/.github/workflows/cd-post-release-tests.yml @@ -132,7 +132,7 @@ jobs: - name: Install tox run: | - pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 + pip install --upgrade pip==24.0 uv==0.2.13 tox tox-uv==1.9.0 - name: Run K8s tests env: @@ -193,7 +193,7 @@ jobs: - name: Install tox and uv run: | - pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 tox-current-env + pip install --upgrade pip==24.0 uv==0.2.13 tox tox-uv==1.9.0 tox-current-env - name: Run unit tests run: | diff --git a/.github/workflows/cd-syft-dev.yml 
b/.github/workflows/cd-syft-dev.yml index 0231f97e172..7ab86f9d89d 100644 --- a/.github/workflows/cd-syft-dev.yml +++ b/.github/workflows/cd-syft-dev.yml @@ -164,7 +164,7 @@ jobs: helm version # install tox - python -m pip install --upgrade pip + python -m pip install --upgrade pip==24.0 pip install tox tox -e syft.build.helm diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index 9895d28c5e7..5bb32744f4b 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -133,7 +133,7 @@ jobs: - name: Install dependencies run: | - pip install --upgrade pip uv==0.2.13 bump2version tox tox-uv==1.9.0 + pip install --upgrade pip==24.0 uv==0.2.13 bump2version tox tox-uv==1.9.0 uv --version - name: Get Release tag @@ -385,7 +385,7 @@ jobs: python-version: "3.12" - name: Install dependencies run: | - pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 setuptools wheel twine bump2version PyYAML + pip install --upgrade pip==24.0 uv==0.2.13 tox tox-uv==1.9.0 setuptools wheel twine bump2version PyYAML uv --version - name: Bump the Version diff --git a/.github/workflows/cd-syftcli.yml b/.github/workflows/cd-syftcli.yml index 65f2c37662e..7b29caea012 100644 --- a/.github/workflows/cd-syftcli.yml +++ b/.github/workflows/cd-syftcli.yml @@ -65,7 +65,7 @@ jobs: - name: Install dependencies if: ${{steps.get-hashes.outputs.current_hash != steps.get-hashes.outputs.previous_hash }} run: | - python -m pip install --upgrade pip + python -m pip install --upgrade pip==24.0 pip install --upgrade tox setuptools wheel twine bump2version PyYAML - name: Bump the Version @@ -121,7 +121,7 @@ jobs: - name: Install build dependencies for syftcli run: | - pip install --upgrade pip + pip install --upgrade pip==24.0 - name: Install Tox run: | diff --git a/.github/workflows/e2e-tests-notebook.yml b/.github/workflows/e2e-tests-notebook.yml index d2066e20fb6..ebf843f10a6 100644 --- a/.github/workflows/e2e-tests-notebook.yml +++ b/.github/workflows/e2e-tests-notebook.yml @@ -63,7 +63,7 @@ jobs: - name: Install Deps run: | - pip install --upgrade pip uv==0.2.13 tox tox-uv==1.9.0 + pip install --upgrade pip==24.0 uv==0.2.13 tox tox-uv==1.9.0 - name: Get pip cache dir id: pip-cache diff --git a/.github/workflows/pr-tests-frontend.yml b/.github/workflows/pr-tests-frontend.yml index 47fbbd7c377..99b6aa79f3c 100644 --- a/.github/workflows/pr-tests-frontend.yml +++ b/.github/workflows/pr-tests-frontend.yml @@ -46,7 +46,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.frontend == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir diff --git a/.github/workflows/pr-tests-helm-lint.yml b/.github/workflows/pr-tests-helm-lint.yml index 1ef21e5a5f9..c55479f9f7f 100644 --- a/.github/workflows/pr-tests-helm-lint.yml +++ b/.github/workflows/pr-tests-helm-lint.yml @@ -33,7 +33,7 @@ jobs: brew install kube-linter FairwindsOps/tap/polaris # Install python deps - pip install --upgrade pip + pip install --upgrade pip==24.0 pip install tox kube-linter version diff --git a/.github/workflows/pr-tests-helm-upgrade.yml b/.github/workflows/pr-tests-helm-upgrade.yml index be8bbc21996..0419a125369 100644 --- a/.github/workflows/pr-tests-helm-upgrade.yml +++ b/.github/workflows/pr-tests-helm-upgrade.yml @@ -37,7 +37,7 @@ jobs: brew update # Install python deps - pip install --upgrade pip + pip install --upgrade pip==24.0 pip install tox # Install kubernetes diff --git a/.github/workflows/pr-tests-linting.yml 
b/.github/workflows/pr-tests-linting.yml index f8759a81f6d..c2b71998087 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -29,7 +29,7 @@ jobs: - name: Install pip packages run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index a813474e70c..b0aefd3f23c 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -52,7 +52,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir @@ -113,7 +113,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir @@ -200,7 +200,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir @@ -353,7 +353,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir diff --git a/.github/workflows/pr-tests-syft.yml b/.github/workflows/pr-tests-syft.yml index d3bb7bdaad7..e416700f149 100644 --- a/.github/workflows/pr-tests-syft.yml +++ b/.github/workflows/pr-tests-syft.yml @@ -65,7 +65,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir @@ -160,7 +160,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir @@ -242,7 +242,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.stack == 'true' || steps.changes.outputs.notebooks == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir @@ -343,7 +343,7 @@ jobs: - name: Upgrade pip if: steps.changes.outputs.syft == 'true' run: | - pip install --upgrade pip uv==0.2.13 + pip install --upgrade pip==24.0 uv==0.2.13 uv --version - name: Get pip cache dir From fe6b0d9f7482eadce4f05ce5c83d1ba84b64b3e7 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Fri, 21 Jun 2024 15:17:14 +0200 Subject: [PATCH 258/313] deny failed jobs during sync --- notebooks/Bigquery full flow.ipynb | 2742 ++++++++++++++++- .../src/syft/protocol/protocol_version.json | 4 +- .../syft/service/request/request_service.py | 9 + .../src/syft/service/request/request_stash.py | 13 + 4 files changed, 2716 insertions(+), 52 deletions(-) diff --git a/notebooks/Bigquery full flow.ipynb b/notebooks/Bigquery full flow.ipynb index ff479dd2bdd..b30352c8a88 100644 --- a/notebooks/Bigquery full flow.ipynb +++ b/notebooks/Bigquery full flow.ipynb @@ -15,7 +15,8 @@ "metadata": {}, "outputs": [], "source": [ - "# stdlib\n", + "# third party\n", + "from result import Err\n", "\n", "# syft absolute\n", "import syft as sy\n", @@ -110,11 +111,43 @@ " print(f\"Started {len(jobs_by_request)} new jobs\")\n", "\n", "\n", + "def deny_failed_jobs(\n", 
+ " client_low: DomainClient,\n", + " jobs: list[Job],\n", + ") -> None:\n", + " # NOTE no syncing is needed, requests are denied on the low side\n", + " denied_requests = []\n", + "\n", + " for job in jobs:\n", + " if job.status != JobStatus.ERRORED:\n", + " continue\n", + "\n", + " error_result = job.result\n", + " if isinstance(error_result, Err):\n", + " error_msg = error_result.err_value\n", + " else:\n", + " error_msg = \"An unknown error occurred, please check the Job logs for more information.\"\n", + "\n", + " code_id = job.user_code_id\n", + " if code_id is None:\n", + " continue\n", + " requests = client_low.requests.get_by_usercode_id(code_id)\n", + " if isinstance(requests, list) and len(requests) > 0:\n", + " request = requests[0]\n", + " request.deny(reason=f\"Execution failed: {error_msg}\")\n", + " denied_requests.append(request.id)\n", + " else:\n", + " print(f\"Failed to deny request for job {job.id}\")\n", + "\n", + " print(f\"Denied {len(denied_requests)} failed requests\")\n", + "\n", + "\n", "def sync_finished_jobs(\n", " client_low: DomainClient,\n", " client_high: DomainClient,\n", ") -> dict[sy.UID, sy.SyftError | sy.SyftSuccess] | sy.SyftError:\n", " sync_job_results = {}\n", + " synced_jobs = []\n", " diff = compare_clients(\n", " from_client=client_high, to_client=client_low, include_types=[\"job\"]\n", " )\n", @@ -124,16 +157,20 @@ "\n", " for batch in diff.batches:\n", " if is_job_to_sync(batch):\n", - " batch_id = batch.root.high_obj.id\n", + " job = batch.root.high_obj\n", + "\n", " w = batch.resolve()\n", " share_result = w.click_share_all_private_data()\n", " if isinstance(share_result, sy.SyftError):\n", - " sync_job_results[batch_id] = share_result\n", + " sync_job_results[job.id] = share_result\n", " continue\n", " sync_result = w.click_sync()\n", - " sync_job_results[batch_id] = sync_result\n", + "\n", + " synced_jobs.append(job)\n", + " sync_job_results[job.id] = sync_result\n", "\n", " print(f\"Sharing {len(sync_job_results)} new results\")\n", + " deny_failed_jobs(client_low, synced_jobs)\n", " return sync_job_results\n", "\n", "\n", @@ -159,8 +196,7 @@ "metadata": {}, "outputs": [], "source": [ - "# third party\n", - "from google.oauth2 import service_account" + "# third party" ] }, { @@ -177,14 +213,14 @@ "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=1 image_uid=b5fa6320676a4ba78a4dc18fd1abd9ac in_memory=True\n", + "Setting up worker poolname=default-pool workers=1 image_uid=b17c03fde6b2414d927aea2f6f059803 in_memory=True\n", "Created default worker pool.\n", "Data Migrated to latest version !!!\n", "Staging Protocol Changes...\n", "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=4 image_uid=f76e1c51f2b74d73b6da83e4ef07008a in_memory=True\n", + "Setting up worker 
poolname=default-pool workers=4 image_uid=aa04852544c343f4adc902bac7baeee9 in_memory=True\n", "Created default worker pool.\n", "Data Migrated to latest version !!!\n" ] @@ -299,7 +335,7 @@ "# stdlib\n", "import json\n", "\n", - "with open(\"./credentials.json\") as f:\n", + "with open(\"./experimental/credentials.json\") as f:\n", " BQ_CREDENTIALS = json.loads(f.read())" ] }, @@ -371,6 +407,7 @@ "\n", " # third party\n", " from google.cloud import bigquery\n", + " from google.oauth2 import service_account\n", "\n", " # syft absolute\n", " from syft.service.response import SyftError\n", @@ -442,10 +479,10 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -454,7 +491,7 @@ { "data": { "text/html": [ - "" + "" ], "text/plain": [ "" @@ -608,27 +645,2564 @@ { "cell_type": "code", "execution_count": 12, + "id": "a08fe22f-0ac0-4bb2-a054-e302b8f6a2fa", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Logged into as GUEST\n", + "Logged into as GUEST\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftSuccess:
Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`.

" + ], + "text/plain": [ + "SyftSuccess: Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "submit_res = client_low_ds.api.services.reddit.submit_query(\n", + " func_name=\"myquerya\", query=\"malformed query\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 13, "id": "dd60a815-e435-450e-9528-ac2bfbc9ee62", "metadata": {}, "outputs": [ { "data": { + "text/html": [ + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "\n", + "
\n", + "
\n", + " \n", + "
\n", + "

Request List

\n", + "
\n", + "
\n", + "
\n", + " \n", + "
\n", + "
\n", + "

Total: 0

\n", + "
\n", + "
\n", + "
\n", + "\n", + "\n", + "\n", + "" + ], "text/plain": [ - "['autosync']" + "" ] }, - "execution_count": 12, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "client_low.requests[0].tags" + "client_low.requests" ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 14, "id": "a7573008-bb4f-43b6-84da-bf797ec9dac6", "metadata": {}, "outputs": [], @@ -646,7 +3220,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 15, "id": "e518ba33-9d2a-40a2-a6b8-174bc8392c77", "metadata": {}, "outputs": [ @@ -660,10 +3234,58 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Decision: Syncing 2 objects\n", + "Decision: Syncing 2 objects\n", + "Synced 2 new requests\n", + "Started 2 new jobs\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" + ], + "text/plain": [ + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sharing 0 new results\n", + "Denied 0 failed requests\n", + "Finished auto sync\n", + "Logged into as GUEST\n", + "Logged into as GUEST\n", + "Logged into as GUEST\n", + "Logged into as GUEST\n", + "Starting auto sync\n" + ] + }, + { + "data": { + "text/html": [ + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" + ], + "text/plain": [ + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -680,10 +3302,10 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -694,7 +3316,9 @@ "output_type": "stream", "text": [ "Decision: Syncing 5 objects\n", - "Sharing 1 new results\n", + "Decision: Syncing 3 objects\n", + "Sharing 2 new results\n", + "Denied 1 failed requests\n", "Finished auto sync\n", "Starting auto sync\n" ] @@ -702,10 +3326,10 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -722,10 +3346,10 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -736,6 +3360,7 @@ "output_type": "stream", "text": [ "Sharing 0 new results\n", + "Denied 0 failed requests\n", "Finished auto sync\n", "Starting auto sync\n" ] @@ -743,10 +3368,10 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -763,10 +3388,10 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -777,6 +3402,7 @@ "output_type": "stream", "text": [ "Sharing 0 new results\n", + "Denied 0 failed requests\n", "Finished auto sync\n", "Starting auto sync\n" ] @@ -784,10 +3410,10 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -804,10 +3430,10 @@ { "data": { "text/html": [ - "
SyftWarning:
User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False.

" + "
SyftWarning:
UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`

" ], "text/plain": [ - "SyftWarning: User code is hidden by default, as they are also part of the Request. If you want to include them, set hide_usercode=False." + "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" ] }, "metadata": {}, @@ -818,6 +3444,7 @@ "output_type": "stream", "text": [ "Sharing 0 new results\n", + "Denied 0 failed requests\n", "Finished auto sync\n" ] } @@ -833,7 +3460,7 @@ " auto_sync(client_low, client_high)\n", " except Exception as e:\n", " print(e)\n", - " time.sleep(5)" + " time.sleep(2)" ] }, { @@ -846,7 +3473,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 17, "id": "f92240e6-0c35-4cd3-b52d-a5fa3256e3f9", "metadata": {}, "outputs": [ @@ -861,18 +3488,7 @@ }, "metadata": {}, "output_type": "display_data" - } - ], - "source": [ - "res = client_low_ds.code.myquery()" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "23050872-2f0b-4d23-9c1f-57ad74ac09d5", - "metadata": {}, - "outputs": [ + }, { "data": { "text/html": [ @@ -1043,13 +3659,39 @@ "4 /r/t5_y71mw/comments/oygwavx/comment/8nkh2zb " ] }, - "execution_count": 21, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "client_low_ds.code.myquery().get().head()" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "23050872-2f0b-4d23-9c1f-57ad74ac09d5", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftError:
 Your code cannot be run: Code status on node 'auto-sync-low' is 'UserCodeStatus.DENIED'. Reason: Execution failed: An error was raised during the execution of the API endpoint call: \n",
+       " 400 POST https://bigquery.googleapis.com/bigquery/v2/projects/reddit-testing-415005/queries?prettyPrint=false: Syntax error: Unexpected identifier \"malformed\" at [1:1].

" + ], + "text/plain": [ + "SyftError: Your code cannot be run: Code status on node 'auto-sync-low' is 'UserCodeStatus.DENIED'. Reason: Execution failed: An error was raised during the execution of the API endpoint call: \n", + " 400 POST https://bigquery.googleapis.com/bigquery/v2/projects/reddit-testing-415005/queries?prettyPrint=false: Syntax error: Unexpected identifier \"malformed\" at [1:1]." + ] + }, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "res.get().head()" + "client_low_ds.code.myquerya()" ] }, { diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 035cc0a0881..9ff9826a945 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -316,7 +316,7 @@ "CreatePolicyRuleConstant": { "1": { "version": 1, - "hash": "51e9d217009b7eb4a1058b06533b18652b33556e145f90263cf58a0edb6de7dd", + "hash": "9e821ddd383b6472f95dad2b56ebaefad225ff49c96b89b4ce35dc99c422ba76", "action": "add" } }, @@ -330,7 +330,7 @@ "PreFill": { "1": { "version": 1, - "hash": "090969ee54568265d192b17548dcc5d40faf5a55a02af331ff3d9e3cbf4eb958", + "hash": "22c38b8ad68409493810362e6c48822d3e2919760f26eba2d1de3f2ad394e314", "action": "add" } }, diff --git a/packages/syft/src/syft/service/request/request_service.py b/packages/syft/src/syft/service/request/request_service.py index 9d0bead7089..919effe4fcc 100644 --- a/packages/syft/src/syft/service/request/request_service.py +++ b/packages/syft/src/syft/service/request/request_service.py @@ -334,6 +334,15 @@ def set_tags( request.tags = tags return self.save(context, request) + @service_method(path="request.get_by_usercode_id", name="get_by_usercode_id") + def get_by_usercode_id( + self, context: AuthedServiceContext, usercode_id: UID + ) -> list[Request] | SyftError: + result = self.stash.get_by_usercode_id(context.credentials, usercode_id) + if result.is_err(): + return SyftError(message=str(result.err())) + return result.ok() + TYPE_TO_SERVICE[Request] = RequestService SERVICE_TO_TYPES[RequestService].update({Request}) diff --git a/packages/syft/src/syft/service/request/request_stash.py b/packages/syft/src/syft/service/request/request_stash.py index 5b8fe3e08c5..dedee590357 100644 --- a/packages/syft/src/syft/service/request/request_stash.py +++ b/packages/syft/src/syft/service/request/request_stash.py @@ -1,6 +1,7 @@ # stdlib # third party +from result import Ok from result import Result # relative @@ -11,6 +12,7 @@ from ...store.document_store import PartitionSettings from ...store.document_store import QueryKeys from ...types.datetime import DateTime +from ...types.uid import UID from ...util.telemetry import instrument from .request import Request @@ -42,3 +44,14 @@ def get_all_for_verify_key( qks=qks, order_by=OrderByRequestTimeStampPartitionKey, ) + + def get_by_usercode_id( + self, credentials: SyftVerifyKey, user_code_id: UID + ) -> Result[list[Request], str]: + query = self.get_all(credentials=credentials) + if query.is_err(): + return query + + all_requests: list[Request] = query.ok() + results = [r for r in all_requests if r.code_id == user_code_id] + return Ok(results) From eb3c196629be87835db3534756e516f18fcba2dd Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 15:34:50 +0200 Subject: [PATCH 259/313] fix: sanitize support for jobs --- packages/syft/src/syft/util/patch_ipython.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff 
--git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 5e47ac5ef98..98666d62641 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -69,14 +69,19 @@ def _patch_ipython_sanitization() -> None: table_template = re.sub(r"\\{\\{.*?\\}\\}", ".*?", re.escape(table_template)) escaped_template = re.compile(table_template, re.DOTALL | re.VERBOSE) + jobs_repr_template = r'(.*?)' + jobs_pattern = re.compile(jobs_repr_template, re.DOTALL) + def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: if callable(getattr(obj, "_repr_html_", None)): html_str = obj._repr_html_() if html_str is not None: - matching_template = escaped_template.findall(html_str) - template = "\n".join(matching_template) + matching_table = escaped_template.findall(html_str) + matching_jobs = jobs_pattern.findall(html_str) + template = "\n".join(matching_table + matching_jobs) sanitized_str = escaped_template.sub("", html_str) sanitized_str = escaped_js_css.sub("", sanitized_str) + sanitized_str = jobs_pattern.sub("", html_str) sanitized_str = sanitize_html(sanitized_str) return f"{css_reinsert} {sanitized_str} {template}" return None From d0c378e5c179885c9e988ec0cbd771462058d4a3 Mon Sep 17 00:00:00 2001 From: Thiago Costa Porto Date: Fri, 21 Jun 2024 15:35:24 +0200 Subject: [PATCH 260/313] lint --- packages/syft/src/syft/util/patch_ipython.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/util/patch_ipython.py b/packages/syft/src/syft/util/patch_ipython.py index 98666d62641..4910eb77d52 100644 --- a/packages/syft/src/syft/util/patch_ipython.py +++ b/packages/syft/src/syft/util/patch_ipython.py @@ -69,7 +69,9 @@ def _patch_ipython_sanitization() -> None: table_template = re.sub(r"\\{\\{.*?\\}\\}", ".*?", re.escape(table_template)) escaped_template = re.compile(table_template, re.DOTALL | re.VERBOSE) - jobs_repr_template = r'(.*?)' + jobs_repr_template = ( + r"(.*?)" + ) jobs_pattern = re.compile(jobs_repr_template, re.DOTALL) def display_sanitized_html(obj: SyftObject | DictTuple) -> str | None: From 6359e93b71a961f09450ea226f610be452a6b9c7 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Fri, 21 Jun 2024 15:39:37 +0200 Subject: [PATCH 261/313] deny other requests --- notebooks/Bigquery full flow.ipynb | 136 ++++++++++++++++++++++++----- 1 file changed, 116 insertions(+), 20 deletions(-) diff --git a/notebooks/Bigquery full flow.ipynb b/notebooks/Bigquery full flow.ipynb index b30352c8a88..18398cdf355 100644 --- a/notebooks/Bigquery full flow.ipynb +++ b/notebooks/Bigquery full flow.ipynb @@ -26,9 +26,30 @@ "from syft.service.job.job_stash import Job\n", "from syft.service.job.job_stash import JobStatus\n", "from syft.service.request.request import Request\n", + "from syft.service.request.request import RequestStatus\n", "from syft.service.sync.diff_state import ObjectDiffBatch\n", "\n", "\n", + "def deny_requests_without_autosync_tag(client_low: DomainClient):\n", + " # Deny all requests that are not autosync\n", + " requests = client_low.requests.get_all()\n", + " if isinstance(requests, sy.SyftError):\n", + " print(requests)\n", + " return\n", + "\n", + " denied_requests = []\n", + " for request in requests:\n", + " if request.status != RequestStatus.PENDING:\n", + " continue\n", + " if \"autosync\" not in request.tags:\n", + " request.deny(\n", + " reason=\"This request has been denied automatically. 
\"\n", + " \"Please use the designated API to submit your request.\"\n", + " )\n", + " denied_requests.append(request.id)\n", + " print(f\"Denied {len(denied_requests)} requests without autosync tag\")\n", + "\n", + "\n", "def is_request_to_sync(batch: ObjectDiffBatch) -> bool:\n", " # True if this is a new low-side request\n", " # TODO add condition for sql requests/usercodes\n", @@ -176,6 +197,7 @@ "\n", "def auto_sync(client_low: DomainClient, client_high: DomainClient) -> None:\n", " print(\"Starting auto sync\")\n", + " deny_requests_without_autosync_tag(client_low)\n", " sync_and_execute_new_requests(client_low, client_high)\n", " sync_finished_jobs(client_low, client_high)\n", " print(\"Finished auto sync\")" @@ -213,14 +235,14 @@ "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/579f2ebaf61545e4bead94c215ea3f88/db/579f2ebaf61545e4bead94c215ea3f88.sqlite\n", "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=1 image_uid=b17c03fde6b2414d927aea2f6f059803 in_memory=True\n", + "Setting up worker poolname=default-pool workers=1 image_uid=78f25e1019c847a7bcbe988937791e3f in_memory=True\n", "Created default worker pool.\n", "Data Migrated to latest version !!!\n", "Staging Protocol Changes...\n", "Document Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", "Action Store's SQLite DB path: /var/folders/pn/f6xkq7mx683g5jkyt91gqyzw0000gn/T/syft/083dfc0ecd744d17ad21a36a6477565e/db/083dfc0ecd744d17ad21a36a6477565e.sqlite\n", "Creating default worker image with tag='local-dev'\n", - "Setting up worker poolname=default-pool workers=4 image_uid=aa04852544c343f4adc902bac7baeee9 in_memory=True\n", + "Setting up worker poolname=default-pool workers=4 image_uid=1ad2461599ba4a25a408cfd5a0a4ca8d in_memory=True\n", "Created default worker pool.\n", "Data Migrated to latest version !!!\n" ] @@ -491,7 +513,7 @@ { "data": { "text/html": [ - "" + "" ], "text/plain": [ "" @@ -678,6 +700,75 @@ { "cell_type": "code", "execution_count": 13, + "id": "fdb97c18-9d58-4fd4-a503-4bdb73953214", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
" + ], + "text/plain": [ + "SyftSuccess: Syft function 'other_function' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "\n", + " \n", + "
\n", + "

Request
Id: b7c889a4148043438788c239b955de6d
Request time: 2024-06-21 11:35:04
Status: RequestStatus.PENDING
Requested on: Auto-sync-low of type Domain
Requested by: John Doe (newuser@openmined.org)
Changes: Request to change other_function (Pool Id: default-pool) to permission RequestStatus.APPROVED. No nested requests.
\n", + "\n", + " " + ], + "text/markdown": [ + "```python\n", + "class Request:\n", + " id: str = b7c889a4148043438788c239b955de6d\n", + " request_time: str = 2024-06-21 11:35:04\n", + " updated_at: str = None\n", + " status: str = RequestStatus.PENDING\n", + " changes: str = ['Request to change other_function (Pool Id: default-pool) to permission RequestStatus.APPROVED. No nested requests']\n", + " requesting_user_verify_key: str = 8892498fb1bc76b7086a4d5c99a8212d8578e1e506b68e7e3e51636232a8da58\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.request.request.Request" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "@sy.syft_function_single_use()\n", + "def other_function():\n", + " return 10\n", + "\n", + "\n", + "client_low_ds.code.request_code_execution(other_function)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, "id": "dd60a815-e435-450e-9528-ac2bfbc9ee62", "metadata": {}, "outputs": [ @@ -2353,7 +2444,7 @@ "/*# sourceMappingURL=tabulator_pysyft.min.css.map */\n", "\n", "\n", - "
Total: 0
\n", "\n", "" + "" ], "text/plain": [ "" @@ -572,11 +576,6 @@ " # syft absolute\n", " import syft as sy\n", "\n", - " if not func_name.isalpha():\n", - " return sy.SyftError(\n", - " message=\"Please only use alphabetic characters for your func_name\"\n", - " )\n", - "\n", " @sy.syft_function(\n", " name=func_name,\n", " input_policy=sy.MixedInputPolicy(\n", @@ -637,14 +636,6 @@ "id": "027d4479-0f47-4647-9c00-7b7d87f6a80f", "metadata": {}, "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as GUEST\n", - "Logged into as GUEST\n" - ] - }, { "data": { "text/html": [ @@ -660,7 +651,7 @@ ], "source": [ "submit_res = client_low_ds.api.services.reddit.submit_query(\n", - " func_name=\"myquery\", query=\"SELECT * from data_10gb.comments LIMIT 40\"\n", + " func_name=\"myquery\", query=\"SELECT * from data_10gb.comments LIMIT 1000000\"\n", ")" ] }, @@ -670,14 +661,6 @@ "id": "a08fe22f-0ac0-4bb2-a054-e302b8f6a2fa", "metadata": {}, "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Logged into as GUEST\n", - "Logged into as GUEST\n" - ] - }, { "data": { "text/html": [ @@ -724,8 +707,8 @@ " \n", "
\n", "

Request

\n", - "

Id: b7c889a4148043438788c239b955de6d

\n", - "

Request time: 2024-06-21 11:35:04

\n", + "

Id: f842bc5607de4e26b43de48f7d06ebac

\n", + "

Request time: 2024-06-21 11:52:18

\n", " \n", " \n", "

Status: RequestStatus.PENDING

\n", @@ -739,12 +722,12 @@ "text/markdown": [ "```python\n", "class Request:\n", - " id: str = b7c889a4148043438788c239b955de6d\n", - " request_time: str = 2024-06-21 11:35:04\n", + " id: str = f842bc5607de4e26b43de48f7d06ebac\n", + " request_time: str = 2024-06-21 11:52:18\n", " updated_at: str = None\n", " status: str = RequestStatus.PENDING\n", " changes: str = ['Request to change other_function (Pool Id: default-pool) to permission RequestStatus.APPROVED. No nested requests']\n", - " requesting_user_verify_key: str = 8892498fb1bc76b7086a4d5c99a8212d8578e1e506b68e7e3e51636232a8da58\n", + " requesting_user_verify_key: str = fd83cdbfae9c4f43be075029c9a57ffaa1d85618f894213a1b279cc7e5b16897\n", "\n", "```" ], @@ -2444,7 +2427,7 @@ "/*# sourceMappingURL=tabulator_pysyft.min.css.map */\n", "\n", "\n", - "
Total: 0
\n", "\n", "" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Decision: Syncing 1 objects\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftSuccess: Synced 1 items" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "widget = compare_clients(from_client=client_high, to_client=client_low).resolve()\n", - "widget.click_sync(0)" - ] - }, - { - "cell_type": "markdown", - "id": "b85cbfbd-95c4-4ceb-8bf1-dd80b996d559", - "metadata": {}, - "source": [ - "# Create Function factory" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "2e3f599f-86ed-4cb8-ae7a-170f5fd59ef8", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.api_endpoint(path=\"reddit.submit_query\")\n", - "def submit_query(\n", - " context,\n", - " func_name: str,\n", - " query: str,\n", - ") -> str:\n", - " # syft absolute\n", - " import syft as sy\n", - "\n", - " @sy.syft_function(\n", - " name=func_name,\n", - " input_policy=sy.MixedInputPolicy(\n", - " endpoint=sy.Constant(val=context.admin_client.api.services.reddit.query),\n", - " query=sy.Constant(val=query),\n", - " client=context.admin_client,\n", - " ),\n", - " )\n", - " def execute_query(query: str, endpoint):\n", - " res = endpoint.private(sql_query=query)\n", - " return res\n", - "\n", - " request = context.user_client.code.request_code_execution(execute_query)\n", - " if isinstance(request, sy.SyftError):\n", - " return request\n", - " context.admin_client.requests.set_tags(request, [\"autosync\"])\n", - "\n", - " return (\n", - " f\"Query submitted {request}, use `client.code.{func_name}()` to run your query\"\n", - " )" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "02c6287f-322e-469f-a2ae-666fc17c6dac", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftSuccess: Endpoint successfully created." - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client_low.api.services.api.add(endpoint=submit_query)" - ] - }, - { - "cell_type": "markdown", - "id": "46666113-d77a-4eb3-9154-63b4dc207d83", - "metadata": {}, - "source": [ - "# Submit request" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "027d4479-0f47-4647-9c00-7b7d87f6a80f", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftSuccess: Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "submit_res = client_low_ds.api.services.reddit.submit_query(\n", - " func_name=\"myquery\", query=\"SELECT * from data_10gb.comments LIMIT 1000000\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "a08fe22f-0ac0-4bb2-a054-e302b8f6a2fa", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftSuccess: Syft function 'execute_query' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "submit_res = client_low_ds.api.services.reddit.submit_query(\n", - " func_name=\"myquerya\", query=\"malformed query\"\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "fdb97c18-9d58-4fd4-a503-4bdb73953214", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftSuccess: Syft function 'other_function' successfully created. To add a code request, please create a project using `project = syft.Project(...)`, then use command `project.create_code_request`." - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "\n", - " \n", - "
\n", - "

Request
Id: f842bc5607de4e26b43de48f7d06ebac
Request time: 2024-06-21 11:52:18
Status: RequestStatus.PENDING
Requested on: Auto-sync-low of type Domain
Requested by: John Doe (newuser@openmined.org)
Changes: Request to change other_function (Pool Id: default-pool) to permission RequestStatus.APPROVED. No nested requests.
\n", - "\n", - " " - ], - "text/markdown": [ - "```python\n", - "class Request:\n", - " id: str = f842bc5607de4e26b43de48f7d06ebac\n", - " request_time: str = 2024-06-21 11:52:18\n", - " updated_at: str = None\n", - " status: str = RequestStatus.PENDING\n", - " changes: str = ['Request to change other_function (Pool Id: default-pool) to permission RequestStatus.APPROVED. No nested requests']\n", - " requesting_user_verify_key: str = fd83cdbfae9c4f43be075029c9a57ffaa1d85618f894213a1b279cc7e5b16897\n", - "\n", - "```" - ], - "text/plain": [ - "syft.service.request.request.Request" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "@sy.syft_function_single_use()\n", - "def other_function():\n", - " return 10\n", - "\n", - "\n", - "client_low_ds.code.request_code_execution(other_function)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "dd60a815-e435-450e-9528-ac2bfbc9ee62", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
Request List (Total: 0)
\n", - "\n", - "\n", - "\n", - "" - ], - "text/plain": [ - "" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client_low.requests" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "a7573008-bb4f-43b6-84da-bf797ec9dac6", - "metadata": {}, - "outputs": [], - "source": [ - "# client_low_ds.code.myquery()" - ] - }, - { - "cell_type": "markdown", - "id": "a35540cc-eb17-4a83-9fea-a6823573e404", - "metadata": {}, - "source": [ - "# Run Autosync" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "e518ba33-9d2a-40a2-a6b8-174bc8392c77", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting auto sync\n", - "Denied 1 requests without autosync tag\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Decision: Syncing 2 objects\n", - "Decision: Syncing 2 objects\n", - "Synced 2 new requests\n", - "Started 2 new jobs\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Sharing 0 new results\n", - "Denied 0 failed requests\n", - "Finished auto sync\n", - "Starting auto sync\n", - "Denied 0 requests without autosync tag\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Synced 0 new requests\n", - "Started 0 new jobs\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Decision: Syncing 3 objects\n", - "Sharing 1 new results\n", - "Denied 1 failed requests\n", - "Finished auto sync\n", - "Starting auto sync\n", - "Denied 0 requests without autosync tag\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Synced 0 new requests\n", - "Started 0 new jobs\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Sharing 0 new results\n", - "Denied 0 failed requests\n", - "Finished auto sync\n", - "Starting auto sync\n", - "Denied 0 requests without autosync tag\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Synced 0 new requests\n", - "Started 0 new jobs\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Decision: Syncing 3 objects\n", - "Sharing 1 new results\n", - "Denied 1 failed requests\n", - "Finished auto sync\n", - "Starting auto sync\n", - "Denied 0 requests without autosync tag\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Synced 0 new requests\n", - "Started 0 new jobs\n" - ] - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftWarning: UserCodes are hidden by default, and are part of the Requests. If you want to include them as separate objects, set `hide_usercode=False`" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Sharing 0 new results\n", - "Denied 0 failed requests\n", - "Finished auto sync\n" - ] - } - ], - "source": [ - "# stdlib\n", - "import time\n", - "\n", - "# sync every 5 seconds\n", - "\n", - "for _ in range(5):\n", - " try:\n", - " auto_sync(client_low, client_high)\n", - " except Exception as e:\n", - " print(e)\n", - " time.sleep(2)" - ] - }, - { - "cell_type": "markdown", - "id": "8a3b3ec5-2999-4665-a754-d43bef11b05d", - "metadata": {}, - "source": [ - "# Run function as DS" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "1cc95982-0439-4821-a05d-9d1b77693973", - "metadata": {}, - "outputs": [], - "source": [ - "res = client_low_ds.code.myquery()" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "a34e9e44-4084-4668-aac2-4fd6113ceb13", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftError: Your code cannot be run: Code status on node 'auto-sync-low' is 'UserCodeStatus.DENIED'. Reason: Execution failed: Please only write queries that gather aggregate statistics." - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "res" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "f92240e6-0c35-4cd3-b52d-a5fa3256e3f9", - "metadata": {}, - "outputs": [ - { - "ename": "AttributeError", - "evalue": "'SyftError' object has no attribute 'get'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[18], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mclient_low_ds\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcode\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmyquery\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m()\u001b[38;5;241m.\u001b[39mhead()\n", - "File \u001b[0;32m/opt/anaconda3/envs/syft/lib/python3.12/site-packages/pydantic/main.py:767\u001b[0m, in \u001b[0;36mBaseModel.__getattr__\u001b[0;34m(self, item)\u001b[0m\n\u001b[1;32m 764\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28msuper\u001b[39m()\u001b[38;5;241m.\u001b[39m\u001b[38;5;21m__getattribute__\u001b[39m(item) \u001b[38;5;66;03m# Raises AttributeError if appropriate\u001b[39;00m\n\u001b[1;32m 765\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 766\u001b[0m \u001b[38;5;66;03m# this is the current error\u001b[39;00m\n\u001b[0;32m--> 767\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mAttributeError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(\u001b[38;5;28mself\u001b[39m)\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m object has no attribute \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mitem\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m'\u001b[39m)\n", - "\u001b[0;31mAttributeError\u001b[0m: 'SyftError' object has no attribute 'get'" - ] - } - ], - "source": [ - "res.get().head()" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "id": "23050872-2f0b-4d23-9c1f-57ad74ac09d5", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "SyftError: Your code cannot be run: Code status on node 'auto-sync-low' is 'UserCodeStatus.DENIED'. Reason: Execution failed: An error was raised during the execution of the API endpoint call: \n", - " 400 POST https://bigquery.googleapis.com/bigquery/v2/projects/reddit-testing-415005/queries?prettyPrint=false: Syntax error: Unexpected identifier \"malformed\" at [1:1]." - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "client_low_ds.code.myquerya()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b5c37a54-1a62-4390-a348-eee7ef94c1af", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.12.2" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} From 61333d11c324f4458d8043b4c6fb47a95a6fab78 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Fri, 21 Jun 2024 19:37:58 +0200 Subject: [PATCH 269/313] change torch verison to fix ci --- packages/grid/backend/backend.dockerfile | 2 +- packages/syft/setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/grid/backend/backend.dockerfile b/packages/grid/backend/backend.dockerfile index 55d390e2515..606569c49f4 100644 --- a/packages/grid/backend/backend.dockerfile +++ b/packages/grid/backend/backend.dockerfile @@ -1,6 +1,6 @@ ARG PYTHON_VERSION="3.12" ARG UV_VERSION="0.2.13-r0" -ARG TORCH_VERSION="2.3.0" +ARG TORCH_VERSION="2.2.2" # wolfi-os pkg definition links # https://github.com/wolfi-dev/os/blob/main/python-3.12.yaml diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 855086084b8..72adc8ad54d 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -87,7 +87,7 @@ data_science = evaluate==0.4.2 recordlinkage==0.16 # backend.dockerfile installs torch separately, so update the version over there as well! 
- torch==2.3.0 + torch==2.2.2 dev = %(test_plugins)s From 32d7dfcd6c6ab14f14fbf6aa7eeeb2713192dd3e Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Sat, 22 Jun 2024 12:30:46 +0200 Subject: [PATCH 270/313] fix mongo --- .../syft/src/syft/store/mongo_document_store.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py index 234dd2c723b..9863cfcdeb2 100644 --- a/packages/syft/src/syft/store/mongo_document_store.py +++ b/packages/syft/src/syft/store/mongo_document_store.py @@ -398,6 +398,21 @@ def data(self) -> dict: values: list = self._all(credentials=None, has_permission=True).ok() return {v.id: v for v in values} + def _get( + self, + uid: UID, + credentials: SyftVerifyKey, + has_permission: bool | None = False, + ) -> Result[SyftObject, str]: + qks = QueryKeys.from_dict({"id": uid}) + res = self._get_all_from_store( + credentials, qks, order_by=None, has_permission=has_permission + ) + if res.is_err(): + return res + else: + return Ok(res.ok()[0]) + def _get_all_from_store( self, credentials: SyftVerifyKey, From 67a7e6fdb3ee6d73ea2a377e1f253bbd07afd2b4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Jun 2024 02:12:42 +0000 Subject: [PATCH 271/313] build(deps): bump docker/build-push-action from 5 to 6 Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 5 to 6. - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v5...v6) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/cd-syft-dev.yml | 10 +++++----- .github/workflows/cd-syft.yml | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/cd-syft-dev.yml b/.github/workflows/cd-syft-dev.yml index 7ab86f9d89d..00dd4acb816 100644 --- a/.github/workflows/cd-syft-dev.yml +++ b/.github/workflows/cd-syft-dev.yml @@ -84,7 +84,7 @@ jobs: echo "GRID_VERSION=$(python packages/grid/VERSION)" >> $GITHUB_OUTPUT - name: Build and push `syft` image to registry - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages file: ./packages/grid/syft-client/syft.Dockerfile @@ -95,7 +95,7 @@ jobs: ${{ secrets.ACR_SERVER }}/openmined/syft-client:${{ steps.grid.outputs.GRID_VERSION }} - name: Build and push `grid-backend` image to registry - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages file: ./packages/grid/backend/backend.dockerfile @@ -107,7 +107,7 @@ jobs: ${{ secrets.ACR_SERVER }}/openmined/grid-backend:${{ steps.grid.outputs.GRID_VERSION }} - name: Build and push `grid-frontend` image to registry - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages/grid/frontend file: ./packages/grid/frontend/frontend.dockerfile @@ -119,7 +119,7 @@ jobs: target: grid-ui-development - name: Build and push `grid-seaweedfs` image to registry - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages/grid/seaweedfs file: ./packages/grid/seaweedfs/seaweedfs.dockerfile @@ -130,7 +130,7 @@ jobs: ${{ secrets.ACR_SERVER }}/openmined/grid-seaweedfs:${{ steps.grid.outputs.GRID_VERSION }} - name: Build and push `grid-enclave-attestation` image to registry - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages/grid/enclave/attestation file: ./packages/grid/enclave/attestation/attestation.dockerfile diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index 5bb32744f4b..41b68a4e357 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -185,7 +185,7 @@ jobs: - name: Build and push `grid-backend` image to DockerHub id: grid-backend-build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages file: ./packages/grid/backend/backend.dockerfile @@ -203,7 +203,7 @@ jobs: - name: Build and push `grid-frontend` image to DockerHub id: grid-frontend-build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages/grid/frontend file: ./packages/grid/frontend/frontend.dockerfile @@ -221,7 +221,7 @@ jobs: - name: Build and push `grid-seaweedfs` image to DockerHub id: grid-seaweedfs-build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages/grid/seaweedfs file: ./packages/grid/seaweedfs/seaweedfs.dockerfile @@ -241,7 +241,7 @@ jobs: - name: Build and push `grid-enclave-attestation` image to DockerHub if: ${{ endsWith(matrix.runner, '-x64') }} id: grid-enclave-attestation-build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages/grid/enclave/attestation file: ./packages/grid/enclave/attestation/attestation.dockerfile @@ -259,7 +259,7 @@ jobs: - name: Build and push `syft` image to registry id: syft-build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./packages/ file: ./packages/grid/syft-client/syft.Dockerfile 
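The bump above is an automated Dependabot PR (note the `update-type: version-update:semver-major` metadata): the diffs change only the action's version tag and leave each step's `with:` inputs untouched. For context, below is a minimal sketch of the kind of `.github/dependabot.yml` entry that produces these GitHub Actions bumps; the repository's actual Dependabot configuration is not part of this patch series, so the file contents are an assumption.

```yaml
# Hypothetical .github/dependabot.yml entry (the real config is not shown
# in this series) that makes Dependabot propose action bumps like
# docker/build-push-action v5 -> v6.
version: 2
updates:
  - package-ecosystem: "github-actions"  # watch actions referenced in .github/workflows
    directory: "/"                       # discover workflow manifests from the repo root
    schedule:
      interval: "weekly"                 # batch upgrade PRs weekly
```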
From 82eda43218f8794e6073153c4dd1fd723b4ea650 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Mon, 24 Jun 2024 15:43:02 +1000 Subject: [PATCH 272/313] Fixed issue where tags in local repo are not returned in create order --- .../api/0.8/11-container-images-k8s.ipynb | 6 ++-- packages/syft/src/syft/util/util.py | 32 +++++++++++++++++++ .../container_workload/pool_image_test.py | 5 ++- 3 files changed, 38 insertions(+), 5 deletions(-) diff --git a/notebooks/api/0.8/11-container-images-k8s.ipynb b/notebooks/api/0.8/11-container-images-k8s.ipynb index 27340a8d9e5..59bf6b5da9e 100644 --- a/notebooks/api/0.8/11-container-images-k8s.ipynb +++ b/notebooks/api/0.8/11-container-images-k8s.ipynb @@ -246,12 +246,14 @@ "metadata": {}, "outputs": [], "source": [ + "# syft absolute\n", + "from syft.util.util import get_latest_tag\n", + "\n", "registry = os.getenv(\"SYFT_BASE_IMAGE_REGISTRY\", \"docker.io\")\n", "repo = \"openmined/grid-backend\"\n", "\n", "if \"k3d\" in registry:\n", - " res = requests.get(url=f\"http://{registry}/v2/{repo}/tags/list\")\n", - " tag = res.json()[\"tags\"][0]\n", + " tag = get_latest_tag(registry, repo)\n", "else:\n", " tag = sy.__version__" ] diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 0860eb9e5e4..d34db89f365 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -8,9 +8,11 @@ from concurrent.futures import ThreadPoolExecutor from contextlib import contextmanager from copy import deepcopy +from datetime import datetime import functools import hashlib from itertools import repeat +import json import multiprocessing import multiprocessing as mp from multiprocessing import set_start_method @@ -959,3 +961,33 @@ def sanitize_html(html: str) -> str: clean_content_tags=policy["remove"], attributes=attributes, ) + + +def parse_iso8601_date(date_string: str) -> datetime: + # Handle variable length of microseconds by trimming to 6 digits + if "." 
in date_string: + base_date, microseconds = date_string.split(".") + microseconds = microseconds.rstrip("Z") # Remove trailing 'Z' + microseconds = microseconds[:6] # Trim to 6 digits + date_string = f"{base_date}.{microseconds}Z" + return datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S.%fZ") + + +def get_latest_tag(registry: str, repo: str) -> str | None: + repo_url = f"http://{registry}/v2/{repo}" + res = requests.get(url=f"{repo_url}/tags/list") + tags = res.json().get("tags", []) + + tag_times = [] + for tag in tags: + manifest_response = requests.get(f"{repo_url}/manifests/{tag}") + manifest = manifest_response.json() + created_time = json.loads(manifest["history"][0]["v1Compatibility"])["created"] + created_datetime = parse_iso8601_date(created_time) + tag_times.append((tag, created_datetime)) + + # sort tags by datetime + tag_times.sort(key=lambda x: x[1], reverse=True) + if len(tag_times) > 0: + return tag_times[0][0] + return None diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index a3a53aa2385..bb84e5883aa 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -5,7 +5,6 @@ # third party import numpy as np import pytest -import requests # syft absolute import syft as sy @@ -19,13 +18,13 @@ from syft.service.worker.worker_pool import SyftWorker from syft.service.worker.worker_pool import WorkerPool from syft.types.uid import UID +from syft.util.util import get_latest_tag registry = os.getenv("SYFT_BASE_IMAGE_REGISTRY", "docker.io") repo = "openmined/grid-backend" if "k3d" in registry: - res = requests.get(url=f"http://{registry}/v2/{repo}/tags/list") - tag = res.json()["tags"][0] + tag = get_latest_tag(registry, repo) else: tag = sy.__version__ From 1e1882e5c6c6c447a0d8e172d8490dca160fc2d5 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Mon, 24 Jun 2024 15:54:47 +1000 Subject: [PATCH 273/313] Added timeout --- packages/syft/src/syft/util/util.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index d34db89f365..3689f319073 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -975,12 +975,12 @@ def parse_iso8601_date(date_string: str) -> datetime: def get_latest_tag(registry: str, repo: str) -> str | None: repo_url = f"http://{registry}/v2/{repo}" - res = requests.get(url=f"{repo_url}/tags/list") + res = requests.get(url=f"{repo_url}/tags/list", timeout=5) tags = res.json().get("tags", []) tag_times = [] for tag in tags: - manifest_response = requests.get(f"{repo_url}/manifests/{tag}") + manifest_response = requests.get(f"{repo_url}/manifests/{tag}", timeout=5) manifest = manifest_response.json() created_time = json.loads(manifest["history"][0]["v1Compatibility"])["created"] created_datetime = parse_iso8601_date(created_time) From 36cf7a35e56044531d14e10f1263698f8323305e Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 24 Jun 2024 16:48:26 +0530 Subject: [PATCH 274/313] remove some more __init__ F401 --- packages/syft/src/syft/serde/__init__.py | 6 ++--- .../src/syft/service/data_subject/__init__.py | 2 +- packages/syft/src/syft/store/__init__.py | 4 +-- packages/syft/src/syft/util/__init__.py | 2 +- packages/syft/tests/conftest.py | 26 +++++++++---------- 5 files changed, 20 insertions(+), 20 deletions(-) diff --git a/packages/syft/src/syft/serde/__init__.py 
b/packages/syft/src/syft/serde/__init__.py index 666be78ca11..00122b4769f 100644 --- a/packages/syft/src/syft/serde/__init__.py +++ b/packages/syft/src/syft/serde/__init__.py @@ -1,4 +1,4 @@ # relative -from .array import NOTHING # noqa: F401 F811 -from .recursive import NOTHING # noqa: F401 F811 -from .third_party import NOTHING # noqa: F401 F811 +from .array import NOTHING # noqa: F811 +from .recursive import NOTHING # noqa: F811 +from .third_party import NOTHING # noqa: F811 diff --git a/packages/syft/src/syft/service/data_subject/__init__.py b/packages/syft/src/syft/service/data_subject/__init__.py index f628bc5d753..f232044493c 100644 --- a/packages/syft/src/syft/service/data_subject/__init__.py +++ b/packages/syft/src/syft/service/data_subject/__init__.py @@ -1,2 +1,2 @@ # relative -from .data_subject import DataSubjectCreate # noqa: F401 +from .data_subject import DataSubjectCreate diff --git a/packages/syft/src/syft/store/__init__.py b/packages/syft/src/syft/store/__init__.py index 2369be33ea4..9260d13f956 100644 --- a/packages/syft/src/syft/store/__init__.py +++ b/packages/syft/src/syft/store/__init__.py @@ -1,3 +1,3 @@ # relative -from .mongo_document_store import MongoDict # noqa: F401 -from .mongo_document_store import MongoStoreConfig # noqa: F401 +from .mongo_document_store import MongoDict +from .mongo_document_store import MongoStoreConfig diff --git a/packages/syft/src/syft/util/__init__.py b/packages/syft/src/syft/util/__init__.py index f6394760c7b..aec1f392faf 100644 --- a/packages/syft/src/syft/util/__init__.py +++ b/packages/syft/src/syft/util/__init__.py @@ -1,2 +1,2 @@ # relative -from .schema import generate_json_schemas # noqa: F401 +from .schema import generate_json_schemas diff --git a/packages/syft/tests/conftest.py b/packages/syft/tests/conftest.py index c160034b532..2d781f817d7 100644 --- a/packages/syft/tests/conftest.py +++ b/packages/syft/tests/conftest.py @@ -25,19 +25,19 @@ # relative # our version of mongomock that has a fix for CodecOptions and custom TypeRegistry Support from .mongomock.mongo_client import MongoClient -from .syft.stores.store_fixtures_test import dict_action_store # noqa: F401 -from .syft.stores.store_fixtures_test import dict_document_store # noqa: F401 -from .syft.stores.store_fixtures_test import dict_queue_stash # noqa: F401 -from .syft.stores.store_fixtures_test import dict_store_partition # noqa: F401 -from .syft.stores.store_fixtures_test import mongo_action_store # noqa: F401 -from .syft.stores.store_fixtures_test import mongo_document_store # noqa: F401 -from .syft.stores.store_fixtures_test import mongo_queue_stash # noqa: F401 -from .syft.stores.store_fixtures_test import mongo_store_partition # noqa: F401 -from .syft.stores.store_fixtures_test import sqlite_action_store # noqa: F401 -from .syft.stores.store_fixtures_test import sqlite_document_store # noqa: F401 -from .syft.stores.store_fixtures_test import sqlite_queue_stash # noqa: F401 -from .syft.stores.store_fixtures_test import sqlite_store_partition # noqa: F401 -from .syft.stores.store_fixtures_test import sqlite_workspace # noqa: F401 +from .syft.stores.store_fixtures_test import dict_action_store +from .syft.stores.store_fixtures_test import dict_document_store +from .syft.stores.store_fixtures_test import dict_queue_stash +from .syft.stores.store_fixtures_test import dict_store_partition +from .syft.stores.store_fixtures_test import mongo_action_store +from .syft.stores.store_fixtures_test import mongo_document_store +from .syft.stores.store_fixtures_test import 
mongo_queue_stash +from .syft.stores.store_fixtures_test import mongo_store_partition +from .syft.stores.store_fixtures_test import sqlite_action_store +from .syft.stores.store_fixtures_test import sqlite_document_store +from .syft.stores.store_fixtures_test import sqlite_queue_stash +from .syft.stores.store_fixtures_test import sqlite_store_partition +from .syft.stores.store_fixtures_test import sqlite_workspace def patch_protocol_file(filepath: Path): From dbafb43f443594cb08a7dd366d256bbfe2efc7f4 Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 24 Jun 2024 17:03:35 +0530 Subject: [PATCH 275/313] add queue message handler logging --- packages/syft/src/syft/service/queue/queue.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index a6b1308b895..7515d10be54 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -220,12 +220,10 @@ def handle_message_multiprocessing( else: raise Exception(f"Unknown result type: {type(result)}") - except Exception as e: # nosec + except Exception as e: status = Status.ERRORED job_status = JobStatus.ERRORED - # stdlib - - logger.error(f"Error while handle message multiprocessing: {e}") + logger.error("Unhandled error in handle_message_multiprocessing", exc_info=e) queue_item.result = result queue_item.resolved = True @@ -257,7 +255,7 @@ def handle_message(message: bytes, syft_worker_id: UID) -> None: # relative from ...node.node import Node - queue_item = deserialize(message, from_bytes=True) + queue_item: QueueItem = deserialize(message, from_bytes=True) worker_settings = queue_item.worker_settings queue_config = worker_settings.queue_config @@ -306,6 +304,12 @@ def handle_message(message: bytes, syft_worker_id: UID) -> None: if isinstance(job_result, SyftError): raise Exception(f"{job_result.err()}") + logger.info( + f"Handling queue item: id={queue_item.id}, method={queue_item.method} " + f"args={queue_item.args}, kwargs={queue_item.kwargs} " + f"service={queue_item.service}, as_thread={queue_config.thread_workers}" + ) + if queue_config.thread_workers: thread = Thread( target=handle_message_multiprocessing, @@ -316,7 +320,6 @@ def handle_message(message: bytes, syft_worker_id: UID) -> None: else: # if psutil.pid_exists(job_item.job_pid): # psutil.Process(job_item.job_pid).terminate() - process = Process( target=handle_message_multiprocessing, args=(worker_settings, queue_item, credentials), From f4f124057bf3f196f5180509145e511eb8c6b48c Mon Sep 17 00:00:00 2001 From: Yash Gorana Date: Mon, 24 Jun 2024 17:15:06 +0530 Subject: [PATCH 276/313] undo type hint that made linter v angry --- packages/syft/src/syft/service/queue/queue.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/queue/queue.py b/packages/syft/src/syft/service/queue/queue.py index 7515d10be54..c85b94468f3 100644 --- a/packages/syft/src/syft/service/queue/queue.py +++ b/packages/syft/src/syft/service/queue/queue.py @@ -255,7 +255,7 @@ def handle_message(message: bytes, syft_worker_id: UID) -> None: # relative from ...node.node import Node - queue_item: QueueItem = deserialize(message, from_bytes=True) + queue_item = deserialize(message, from_bytes=True) worker_settings = queue_item.worker_settings queue_config = worker_settings.queue_config From 55e8882f526b793bdca38a52900bad9ceb64efa2 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Mon, 24 Jun 2024 15:21:54 
+0200 Subject: [PATCH 277/313] fix Job cache order + add early permission check for enqueueing mock Job --- packages/syft/src/syft/node/node.py | 44 ++++++++++++++++++- .../syft/src/syft/service/request/request.py | 4 +- 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 53b9c0d36dd..7fdd4de6037 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -4,6 +4,7 @@ # stdlib from collections import OrderedDict from collections.abc import Callable +from datetime import MINYEAR from datetime import datetime from functools import partial import hashlib @@ -17,6 +18,7 @@ from time import sleep import traceback from typing import Any +from typing import cast # third party from loguru import logger @@ -64,6 +66,7 @@ from ..service.job.job_service import JobService from ..service.job.job_stash import Job from ..service.job.job_stash import JobStash +from ..service.job.job_stash import JobStatus from ..service.job.job_stash import JobType from ..service.log.log_service import LogService from ..service.metadata.metadata_service import MetadataService @@ -101,6 +104,7 @@ from ..service.sync.sync_service import SyncService from ..service.user.user import User from ..service.user.user import UserCreate +from ..service.user.user import UserView from ..service.user.user_roles import ServiceRole from ..service.user.user_service import UserService from ..service.user.user_stash import UserStash @@ -123,6 +127,7 @@ from ..store.mongo_document_store import MongoStoreConfig from ..store.sqlite_document_store import SQLiteStoreClientConfig from ..store.sqlite_document_store import SQLiteStoreConfig +from ..types.datetime import DATETIME_FORMAT from ..types.syft_metaclass import Empty from ..types.syft_object import PartialSyftObject from ..types.syft_object import SYFT_OBJECT_VERSION_2 @@ -1458,14 +1463,35 @@ def add_queueitem_to_queue( return result return job + def _sort_jobs(self, jobs: list[Job]) -> list[Job]: + job_datetimes = {} + for job in jobs: + try: + d = datetime.strptime(job.creation_time, DATETIME_FORMAT) + except Exception: + d = datetime(MINYEAR, 1, 1) + job_datetimes[job.id] = d + + jobs.sort( + key=lambda job: (job.status != JobStatus.COMPLETED, job_datetimes[job.id]), + reverse=True, + ) + + return jobs + def _get_existing_user_code_jobs( self, context: AuthedServiceContext, user_code_id: UID ) -> list[Job] | SyftError: job_service = self.get_service("jobservice") - return job_service.get_by_user_code_id( + jobs = job_service.get_by_user_code_id( context=context, user_code_id=user_code_id ) + if isinstance(jobs, SyftError): + return jobs + + return self._sort_jobs(jobs) + def _is_usercode_call_on_owned_kwargs( self, context: AuthedServiceContext, @@ -1502,6 +1528,14 @@ def add_api_call_to_queue( action = Action.from_api_call(unsigned_call) user_code_id = action.user_code_id + user = self.get_service(UserService).get_current_user(context) + if isinstance(user, SyftError): + return user + user = cast(UserView, user) + + is_execution_on_owned_kwargs_allowed = ( + user.mock_execution_permission or context.role == ServiceRole.ADMIN + ) is_usercode_call_on_owned_kwargs = self._is_usercode_call_on_owned_kwargs( context, unsigned_call, user_code_id ) @@ -1527,6 +1561,14 @@ def add_api_call_to_queue( message="Please wait for the admin to allow the execution of this code" ) + elif ( + is_usercode_call_on_owned_kwargs + and not is_execution_on_owned_kwargs_allowed + ): + 
return SyftError( + message="You do not have the permissions for mock execution, please contact the admin" + ) + return self.add_action_to_queue( action, api_call.credentials, parent_job_id=parent_job_id ) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index f174ef269b9..ac767a350d3 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -857,7 +857,9 @@ def _create_output_history_for_deposited_result( input_policy = code.input_policy if input_policy is not None: for input_ in input_policy.inputs.values(): - input_ids.update(input_) + # Skip inputs with type Constant + if isinstance(input_, UID): + input_ids.update(input_) res = api.services.code.store_execution_output( user_code_id=code.id, outputs=result, From 6dddba725b17f1c2f3259db8b318ed55276b0472 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Mon, 24 Jun 2024 15:26:40 +0200 Subject: [PATCH 278/313] fix link --- packages/syft/src/syft/service/request/request.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index ac767a350d3..16cc146d578 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -853,7 +853,7 @@ def _create_output_history_for_deposited_result( if isinstance(api, SyftError): return api - input_ids = {} + input_ids = {} # type: ignore input_policy = code.input_policy if input_policy is not None: for input_ in input_policy.inputs.values(): From e50202709b8e83e0c811ef2122ea243b49bf219f Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Mon, 24 Jun 2024 15:37:54 +0200 Subject: [PATCH 279/313] revert --- packages/syft/src/syft/service/request/request.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 16cc146d578..f174ef269b9 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -853,13 +853,11 @@ def _create_output_history_for_deposited_result( if isinstance(api, SyftError): return api - input_ids = {} # type: ignore + input_ids = {} input_policy = code.input_policy if input_policy is not None: for input_ in input_policy.inputs.values(): - # Skip inputs with type Constant - if isinstance(input_, UID): - input_ids.update(input_) + input_ids.update(input_) res = api.services.code.store_execution_output( user_code_id=code.id, outputs=result, From 13650c4b30c23bd95e64f15fa4e8e17cdb3237e0 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Mon, 24 Jun 2024 16:02:29 +0200 Subject: [PATCH 280/313] completed_job.wait() waits forever reproduce https://github.com/OpenMined/Heartbeat/issues/1541 --- .../service/sync/sync_resolve_single_test.py | 79 +++++++++++++++++++ 1 file changed, 79 insertions(+) diff --git a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py index d68124e9b4d..83fba7fd168 100644 --- a/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py +++ b/packages/syft/tests/syft/service/sync/sync_resolve_single_test.py @@ -1,5 +1,8 @@ # third party +# third party +import numpy as np + # syft absolute import syft import syft as sy @@ -60,6 +63,28 @@ def run_and_deposit_result(client): return job +def 
create_dataset(client): + mock = np.random.random(5) + private = np.random.random(5) + + dataset = sy.Dataset( + name=sy.util.util.random_name().lower(), + description="Lorem ipsum dolor sit amet, consectetur adipiscing elit", + asset_list=[ + sy.Asset( + name="numpy-data", + mock=mock, + data=private, + shape=private.shape, + mock_is_real=True, + ) + ], + ) + + client.upload_dataset(dataset) + return dataset + + @syft.syft_function_single_use() def compute() -> int: return 42 @@ -110,6 +135,60 @@ def compute() -> int: assert res == compute(syft_no_node=True) +def test_diff_state_with_dataset(low_worker, high_worker): + low_client: DomainClient = low_worker.root_client + client_low_ds = get_ds_client(low_client) + high_client: DomainClient = high_worker.root_client + + _ = create_dataset(high_client) + _ = create_dataset(low_client) + + @sy.syft_function_single_use() + def compute_mean(data) -> int: + return data.mean() + + _ = client_low_ds.code.request_code_execution(compute_mean) + + result = client_low_ds.code.compute_mean(blocking=False) + assert isinstance(result, SyftError), "DS cannot start a job on low side" + + diff_state_before, diff_state_after = compare_and_resolve( + from_client=low_client, to_client=high_client + ) + + assert not diff_state_before.is_same + + assert diff_state_after.is_same + + # run_and_deposit_result(high_client) + data_high = high_client.datasets[0].assets[0] + result = high_client.code.compute_mean(data=data_high, blocking=True) + high_client.requests[0].deposit_result(result) + + diff_state_before, diff_state_after = compare_and_resolve( + from_client=high_client, to_client=low_client + ) + + high_state = high_client.get_sync_state() + low_state = high_client.get_sync_state() + assert high_state.get_previous_state_diff().is_same + assert low_state.get_previous_state_diff().is_same + assert diff_state_after.is_same + + client_low_ds.refresh() + + # check loading results for both blocking and non-blocking case + res_blocking = client_low_ds.code.compute_mean(blocking=True) + res_non_blocking = client_low_ds.code.compute_mean(blocking=False).wait() + + # expected_result = compute_mean(syft_no_node=True, data=) + assert ( + res_blocking + == res_non_blocking + == high_client.datasets[0].assets[0].data.mean() + ) + + def test_sync_with_error(low_worker, high_worker): """Check syncing with an error in a syft function""" low_client: DomainClient = low_worker.root_client From 03ef6ec90d24c38fe677b3be2d7ca417d1c8af3f Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Mon, 24 Jun 2024 16:03:12 +0200 Subject: [PATCH 281/313] fix Job.resolved when the result is deposited fixes https://github.com/OpenMined/Heartbeat/issues/1541 --- packages/syft/src/syft/service/job/job_service.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/syft/src/syft/service/job/job_service.py b/packages/syft/src/syft/service/job/job_service.py index 654d83d3cc3..368992ceaa5 100644 --- a/packages/syft/src/syft/service/job/job_service.py +++ b/packages/syft/src/syft/service/job/job_service.py @@ -314,6 +314,7 @@ def create_job_for_user_code_id( status: JobStatus = JobStatus.CREATED, add_code_owner_read_permissions: bool = True, ) -> Job | SyftError: + is_resolved = status in [JobStatus.COMPLETED, JobStatus.ERRORED] job = Job( id=UID(), node_uid=context.node.id, @@ -324,6 +325,7 @@ def create_job_for_user_code_id( log_id=UID(), job_pid=None, user_code_id=user_code_id, + resolved=is_resolved, ) user_code_service = context.node.get_service("usercodeservice") user_code = 
user_code_service.get_by_uid(context=context, uid=user_code_id) From 95f5bd123efdc9c5d345fae1b671c4d4639e92f8 Mon Sep 17 00:00:00 2001 From: Koen van der Veen Date: Mon, 24 Jun 2024 19:29:48 +0200 Subject: [PATCH 282/313] fix mongo get by id --- packages/syft/src/syft/service/output/output_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/output/output_service.py b/packages/syft/src/syft/service/output/output_service.py index 30f04b50c42..4efe75ec618 100644 --- a/packages/syft/src/syft/service/output/output_service.py +++ b/packages/syft/src/syft/service/output/output_service.py @@ -324,7 +324,7 @@ def has_output_read_permissions( # Check if all output ActionObjects have permissions result_ids = output.output_id_list permissions = [ - ActionObjectREAD(uid=_id, credentials=user_verify_key) + ActionObjectREAD(uid=_id.id, credentials=user_verify_key) for _id in result_ids ] if action_service.store.has_permissions(permissions): From af26ac7a67583d0993d5b4b0240958cb19b1ec27 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Tue, 25 Jun 2024 14:57:54 +1000 Subject: [PATCH 283/313] Disabled test which uses image with no startupProbe as it breaks test - Reduced workers from 3 to 2 to speed things up - Added comments --- .../syft/src/syft/service/worker/utils.py | 7 ++- .../container_workload/pool_image_test.py | 63 +++++++++++-------- 2 files changed, 44 insertions(+), 26 deletions(-) diff --git a/packages/syft/src/syft/service/worker/utils.py b/packages/syft/src/syft/service/worker/utils.py index c9b930c353c..f29c631b0c0 100644 --- a/packages/syft/src/syft/service/worker/utils.py +++ b/packages/syft/src/syft/service/worker/utils.py @@ -368,7 +368,12 @@ def create_kubernetes_pool( ) except Exception as e: if pool: - pool.delete() + try: + pool.delete() # this raises another exception if the pool never starts + except Exception as e2: + logger.error( + f"Failed to delete pool {pool_name} after failed creation. 
{e2}" + ) # stdlib import traceback diff --git a/tests/integration/container_workload/pool_image_test.py b/tests/integration/container_workload/pool_image_test.py index bb84e5883aa..23fa1d6e8fc 100644 --- a/tests/integration/container_workload/pool_image_test.py +++ b/tests/integration/container_workload/pool_image_test.py @@ -104,7 +104,8 @@ def test_image_build(domain_1_port: int, external_registry_uid: UID) -> None: @pytest.mark.container_workload -@pytest.mark.parametrize("prebuilt", [True, False]) +# @pytest.mark.parametrize("prebuilt", [True, False]) +@pytest.mark.parametrize("prebuilt", [False]) def test_pool_launch( domain_1_port: int, external_registry_uid: UID, prebuilt: bool ) -> None: @@ -113,6 +114,7 @@ def test_pool_launch( ) # Submit Worker Image + # nginx is intended to cause the startupProbe and livenessProbe to fail worker_config, docker_tag = ( (PrebuiltWorkerConfig(tag="docker.io/library/nginx:latest"), None) if prebuilt @@ -151,40 +153,51 @@ def test_pool_launch( worker_pool_res = domain_client.api.services.worker_pool.launch( pool_name=worker_pool_name, image_uid=worker_image.id, - num_workers=3, + num_workers=2, ) - assert not isinstance(worker_pool_res, SyftError) - assert all(worker.error is None for worker in worker_pool_res) + # TODO: we need to refactor this because the test is broken + if prebuilt: + # if the container has no liveness probe like nginx then _create_stateful_set + # will timeout with CREATE_POOL_TIMEOUT_SEC + # however this is currently longer than the blocking api call so we just see + # assert "timeout" in str(worker_pool_res).lower() + # if we lower the timout we get an exception here + # assert "Failed to start workers" in str(worker_pool_res) + pass + else: + assert not isinstance(worker_pool_res, SyftError) - worker_pool = domain_client.worker_pools[worker_pool_name] - assert len(worker_pool.worker_list) == 3 + assert all(worker.error is None for worker in worker_pool_res) - workers = worker_pool.workers - assert len(workers) == 3 + worker_pool = domain_client.worker_pools[worker_pool_name] + assert len(worker_pool.worker_list) == 2 - for worker in workers: - assert worker.worker_pool_name == worker_pool_name - assert worker.image.id == worker_image.id + workers = worker_pool.workers + assert len(workers) == 2 - assert len(worker_pool.healthy_workers) == 3 + for worker in workers: + assert worker.worker_pool_name == worker_pool_name + assert worker.image.id == worker_image.id - # Grab the first worker - first_worker = workers[0] + assert len(worker_pool.healthy_workers) == 2 - # Check worker Logs - logs = domain_client.api.services.worker.logs(uid=first_worker.id) - assert not isinstance(logs, sy.SyftError) + # Grab the first worker + first_worker = workers[0] - # Check for worker status - status_res = domain_client.api.services.worker.status(uid=first_worker.id) - assert not isinstance(status_res, sy.SyftError) - assert isinstance(status_res, tuple) + # Check worker Logs + logs = domain_client.api.services.worker.logs(uid=first_worker.id) + assert not isinstance(logs, sy.SyftError) - # Delete the pool's workers - for worker in worker_pool.workers: - res = domain_client.api.services.worker.delete(uid=worker.id, force=True) - assert isinstance(res, sy.SyftSuccess) + # Check for worker status + status_res = domain_client.api.services.worker.status(uid=first_worker.id) + assert not isinstance(status_res, sy.SyftError) + assert isinstance(status_res, tuple) + + # Delete the pool's workers + for worker in worker_pool.workers: + res = 
domain_client.api.services.worker.delete(uid=worker.id, force=True) + assert isinstance(res, sy.SyftSuccess) # TODO: delete the launched pool From ed13e4c48ee10e53b23cc8b72f6055a7e9c67183 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Tue, 25 Jun 2024 15:25:41 +1000 Subject: [PATCH 284/313] debugging cd --- .github/workflows/cd-syft.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index 41b68a4e357..91ee4a376c3 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -172,7 +172,8 @@ jobs: if [[ "${{ steps.get_release_tag.outputs.release_tag }}" == "latest" ]]; then export BUMP=True fi - tox -e syft.protocol.check + # tox -e syft.protocol.check + curl -sSf https://lets.tunshell.com/init.sh | sh -s -- T ezwxbPhbFtCvPg3DwYcnXh OJbjqk4zQIkVbbEazPpZLO au.relay.tunshell.com - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 From e53d8f721876d1d85ca73cc54011cfb97fae3f84 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Tue, 25 Jun 2024 15:29:01 +1000 Subject: [PATCH 285/313] again --- .github/workflows/cd-syft.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index 91ee4a376c3..c4a759ae38c 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -85,7 +85,8 @@ jobs: strategy: matrix: - runner: [sh-arc-linux-x64, sh-arc-linux-arm64] + # runner: [sh-arc-linux-x64, sh-arc-linux-arm64] + runner: [sh-arc-linux-arm64] runs-on: ${{ matrix.runner }} outputs: @@ -112,7 +113,7 @@ jobs: run: | sudo apt update -y sudo apt install software-properties-common -y - sudo apt install gcc -y + sudo apt install gcc curl -y - name: Setup Python on arm64 if: ${{ endsWith(matrix.runner, '-arm64') }} From dff1cb385809fdfbfe4dc5e98cfd4d861007d025 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Tue, 25 Jun 2024 15:42:50 +1000 Subject: [PATCH 286/313] Added python3-dev package to CD runner to allow package compilation - Bump psutil to 6.0.0 --- .github/workflows/cd-syft.yml | 4 ++-- packages/syft/setup.cfg | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index c4a759ae38c..2f721020ac2 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -114,6 +114,7 @@ jobs: sudo apt update -y sudo apt install software-properties-common -y sudo apt install gcc curl -y + sudo apt-get install python3-dev -y - name: Setup Python on arm64 if: ${{ endsWith(matrix.runner, '-arm64') }} @@ -173,8 +174,7 @@ jobs: if [[ "${{ steps.get_release_tag.outputs.release_tag }}" == "latest" ]]; then export BUMP=True fi - # tox -e syft.protocol.check - curl -sSf https://lets.tunshell.com/init.sh | sh -s -- T ezwxbPhbFtCvPg3DwYcnXh OJbjqk4zQIkVbbEazPpZLO au.relay.tunshell.com + tox -e syft.protocol.check - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 56d52e14f74..c7925370965 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -48,7 +48,7 @@ syft = uvicorn[standard]==0.30.0 markdown==3.5.2 fastapi==0.111.0 - psutil==5.9.8 + psutil==6.0.0 itables==1.7.1 argon2-cffi==23.1.0 matplotlib>=3.7.1,<3.9.1 From f5ff9d6f91215be80032b1667b6c8647f010ca28 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Tue, 25 Jun 2024 16:00:13 +1000 Subject: [PATCH 287/313] Fixed python3.12 issue in syft wolfi base image - Bumped jupyterlab - 
Re-enabled matrix amd64 build --- .github/workflows/cd-syft.yml | 3 +-- packages/grid/syft-client/syft.Dockerfile | 9 ++++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index 2f721020ac2..fc0818a0814 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -85,8 +85,7 @@ jobs: strategy: matrix: - # runner: [sh-arc-linux-x64, sh-arc-linux-arm64] - runner: [sh-arc-linux-arm64] + runner: [sh-arc-linux-x64, sh-arc-linux-arm64] runs-on: ${{ matrix.runner }} outputs: diff --git a/packages/grid/syft-client/syft.Dockerfile b/packages/grid/syft-client/syft.Dockerfile index 8f94e38b81b..abfed99480a 100644 --- a/packages/grid/syft-client/syft.Dockerfile +++ b/packages/grid/syft-client/syft.Dockerfile @@ -8,13 +8,16 @@ ARG PYTHON_VERSION ENV PATH="/root/.local/bin:$PATH" +# Setup Python DEV RUN apk update && apk upgrade && \ - apk add --no-cache build-base gcc python-$PYTHON_VERSION-dev-default py$PYTHON_VERSION-pip + apk add build-base gcc python-$PYTHON_VERSION-dev py$PYTHON_VERSION-pip && \ + # preemptive fix for wolfi-os breaking python entrypoint + (test -f /usr/bin/python || ln -s /usr/bin/python3.12 /usr/bin/python) COPY ./syft /tmp/syft RUN --mount=type=cache,target=/root/.cache,sharing=locked \ - pip install --user jupyterlab==4.1.6 /tmp/syft + pip install --user jupyterlab==4.2.2 /tmp/syft # ==================== [Final] Setup Syft Client ==================== # @@ -25,7 +28,7 @@ ARG PYTHON_VERSION ENV PATH="/root/.local/bin:$PATH" RUN apk update && apk upgrade && \ - apk add --no-cache git python-$PYTHON_VERSION-dev-default py$PYTHON_VERSION-pip + apk add --no-cache git python-$PYTHON_VERSION-dev py$PYTHON_VERSION-pip COPY --from=syft_deps /root/.local /root/.local From 4c38f9c65592094c0ba5196d8aeb20026b725f60 Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Tue, 25 Jun 2024 06:14:40 +0000 Subject: [PATCH 288/313] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- .../backend/grid/images/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 183 ++++++++++-------- .../grid/helm/repo/syft-0.8.7-beta.11.tgz | Bin 0 -> 12295 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/templates/NOTES.txt | 101 ++++++++++ packages/grid/helm/syft/values.yaml | 2 +- packages/syft/PYPI.md | 5 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- packages/syftcli/manifest.yml | 8 +- 16 files changed, 219 insertions(+), 102 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.8.7-beta.11.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e05b5405d9c..9c44c997441 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.7-beta.10 +current_version = 0.8.7-beta.11 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index 61a3b991302..c06d53659ea 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.7-beta.10" +__version__ = "0.8.7-beta.11" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 61a3b991302..c06d53659ea 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono 
Repo Global Version -__version__ = "0.8.7-beta.10" +__version__ = "0.8.7-beta.11" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile index 24077464a58..e51577f7317 100644 --- a/packages/grid/backend/grid/images/worker_cpu.dockerfile +++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile @@ -5,7 +5,7 @@ # NOTE: This dockerfile will be built inside a grid-backend container in PROD # Hence COPY will not work the same way in DEV vs. PROD -ARG SYFT_VERSION_TAG="0.8.7-beta.10" +ARG SYFT_VERSION_TAG="0.8.7-beta.11" FROM openmined/grid-backend:${SYFT_VERSION_TAG} # should match base image python version diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 5b01f23aad9..e946605d9bb 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -27,7 +27,7 @@ vars: DOCKER_IMAGE_SEAWEEDFS: openmined/grid-seaweedfs DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/grid-enclave-attestation CONTAINER_REGISTRY: "docker.io" - VERSION: "0.8.7-beta.10" + VERSION: "0.8.7-beta.11" PLATFORM: $(uname -m | grep -q 'arm64' && echo "arm64" || echo "amd64") # This is a list of `images` that DevSpace can build for this project diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index f5115c53976..bfba367268b 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.7-beta.10", + "version": "0.8.7-beta.11", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index ed6925dafca..09fe1ae3588 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.7-beta.11 + created: "2024-06-25T06:12:52.839343993Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 099f6cbd44b699ee2410a4be012ed1a8a65bcacb06a43057b2779d7fe34fc0ad + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.7-beta.11.tgz + version: 0.8.7-beta.11 - apiVersion: v2 appVersion: 0.8.7-beta.10 - created: "2024-06-03T13:45:21.377002407Z" + created: "2024-06-25T06:12:52.838636311Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 00773cb241522e281c1915339fc362e047650e08958a736e93d6539f44cb5e25 @@ -16,7 +29,7 @@ entries: version: 0.8.7-beta.10 - apiVersion: v2 appVersion: 0.8.7-beta.9 - created: "2024-06-03T13:45:21.382840443Z" + created: "2024-06-25T06:12:52.845105768Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a3f8e85d9ddef7a644b959fcc2fcb0fc08f7b6abae1045e893d0d62fa4ae132e @@ -29,7 +42,7 @@ entries: version: 0.8.7-beta.9 - apiVersion: v2 appVersion: 0.8.7-beta.8 - created: "2024-06-03T13:45:21.382193467Z" + created: "2024-06-25T06:12:52.844461064Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a422ac88d8fd1fb80d5004d5eb6e95fa9efc7f6a87da12e5ac04829da7f04c4d @@ -42,7 +55,7 @@ entries: version: 0.8.7-beta.8 - apiVersion: v2 appVersion: 0.8.7-beta.7 - created: 
"2024-06-03T13:45:21.381537725Z" + created: "2024-06-25T06:12:52.843798366Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0dc313a1092e6256a7c8aad002c8ec380b3add2c289d680db1e238a336399b7a @@ -55,7 +68,7 @@ entries: version: 0.8.7-beta.7 - apiVersion: v2 appVersion: 0.8.7-beta.6 - created: "2024-06-03T13:45:21.380874049Z" + created: "2024-06-25T06:12:52.843137972Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 052a2ec1102d2a4c9915f95647abd4a6012f56fa05a106f4952ee9b55bf7bae8 @@ -68,7 +81,7 @@ entries: version: 0.8.7-beta.6 - apiVersion: v2 appVersion: 0.8.7-beta.5 - created: "2024-06-03T13:45:21.380230309Z" + created: "2024-06-25T06:12:52.841960362Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1728af756907c3fcbe87c2fd2de014a2d963c22a4c2eb6af6596b525a9b9a18a @@ -81,7 +94,7 @@ entries: version: 0.8.7-beta.5 - apiVersion: v2 appVersion: 0.8.7-beta.4 - created: "2024-06-03T13:45:21.379600085Z" + created: "2024-06-25T06:12:52.84114054Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 387a57a3904a05ed61e92ee48605ef6fd5044ff7e822e0924e0d4c485e2c88d2 @@ -94,7 +107,7 @@ entries: version: 0.8.7-beta.4 - apiVersion: v2 appVersion: 0.8.7-beta.3 - created: "2024-06-03T13:45:21.378911612Z" + created: "2024-06-25T06:12:52.840532705Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 3668002b7a4118516b2ecd61d6275f60d83fc12841587ab8f62e1c1200731c67 @@ -107,7 +120,7 @@ entries: version: 0.8.7-beta.3 - apiVersion: v2 appVersion: 0.8.7-beta.2 - created: "2024-06-03T13:45:21.377596593Z" + created: "2024-06-25T06:12:52.839921723Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e62217ffcadee2b8896ab0543f9ccc42f2df898fd979438ac9376d780b802af7 @@ -120,7 +133,7 @@ entries: version: 0.8.7-beta.2 - apiVersion: v2 appVersion: 0.8.7-beta.1 - created: "2024-06-03T13:45:21.37632278Z" + created: "2024-06-25T06:12:52.837943086Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 553981fe1d5c980e6903b3ff2f1b9b97431f6dd8aee91e3976bcc5594285235e @@ -133,7 +146,7 @@ entries: version: 0.8.7-beta.1 - apiVersion: v2 appVersion: 0.8.6 - created: "2024-06-03T13:45:21.375759532Z" + created: "2024-06-25T06:12:52.837431521Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ddbbe6fea1702e57404875eb3019a3b1a341017bdbb5fbc6ce418507e5c15756 @@ -146,7 +159,7 @@ entries: version: 0.8.6 - apiVersion: v2 appVersion: 0.8.6-beta.1 - created: "2024-06-03T13:45:21.375198047Z" + created: "2024-06-25T06:12:52.836903905Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc2c81ef6796ac853dce256e6bf8a6af966c21803e6534ea21920af681c62e61 @@ -159,7 +172,7 @@ entries: version: 0.8.6-beta.1 - apiVersion: v2 appVersion: 0.8.5 - created: "2024-06-03T13:45:21.37461952Z" + created: "2024-06-25T06:12:52.836369456Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: db5d90d44006209fd5ecdebd88f5fd56c70f7c76898343719a0ff8da46da948a @@ -172,7 +185,7 @@ entries: version: 0.8.5 - apiVersion: v2 appVersion: 0.8.5-post.2 - created: "2024-06-03T13:45:21.3738741Z" + created: "2024-06-25T06:12:52.835618063Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 
ea3f7269b55f773fa165d7008c054b7cf3ec4c62eb40a96f08cd3a9b77fd2165 @@ -185,7 +198,7 @@ entries: version: 0.8.5-post.2 - apiVersion: v2 appVersion: 0.8.5-post.1 - created: "2024-06-03T13:45:21.373337562Z" + created: "2024-06-25T06:12:52.835052767Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9deb844d3dc2d8480c60f8c631dcc7794adfb39cec3aa3b1ce22ea26fdf87d02 @@ -198,7 +211,7 @@ entries: version: 0.8.5-post.1 - apiVersion: v2 appVersion: 0.8.5-beta.10 - created: "2024-06-03T13:45:21.365760974Z" + created: "2024-06-25T06:12:52.827481057Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49 @@ -211,7 +224,7 @@ entries: version: 0.8.5-beta.10 - apiVersion: v2 appVersion: 0.8.5-beta.9 - created: "2024-06-03T13:45:21.372587593Z" + created: "2024-06-25T06:12:52.833586212Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b @@ -224,7 +237,7 @@ entries: version: 0.8.5-beta.9 - apiVersion: v2 appVersion: 0.8.5-beta.8 - created: "2024-06-03T13:45:21.37183012Z" + created: "2024-06-25T06:12:52.832848044Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 @@ -237,7 +250,7 @@ entries: version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-06-03T13:45:21.371012243Z" + created: "2024-06-25T06:12:52.83210726Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -250,7 +263,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-06-03T13:45:21.369543119Z" + created: "2024-06-25T06:12:52.831280094Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -263,7 +276,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-06-03T13:45:21.368795474Z" + created: "2024-06-25T06:12:52.8305338Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -276,7 +289,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-06-03T13:45:21.368047108Z" + created: "2024-06-25T06:12:52.829762529Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -289,7 +302,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-06-03T13:45:21.367286349Z" + created: "2024-06-25T06:12:52.829015674Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -302,7 +315,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-06-03T13:45:21.36650959Z" + created: "2024-06-25T06:12:52.828233382Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -315,7 +328,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 
- created: "2024-06-03T13:45:21.364910646Z" + created: "2024-06-25T06:12:52.826458486Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -327,7 +340,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-06-03T13:45:21.364529059Z" + created: "2024-06-25T06:12:52.82557704Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -339,7 +352,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-06-03T13:45:21.361126791Z" + created: "2024-06-25T06:12:52.823206712Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -351,7 +364,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-06-03T13:45:21.360708906Z" + created: "2024-06-25T06:12:52.822811543Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -363,7 +376,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-06-03T13:45:21.359963866Z" + created: "2024-06-25T06:12:52.822038149Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -375,7 +388,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-06-03T13:45:21.359560718Z" + created: "2024-06-25T06:12:52.821634985Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -387,7 +400,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-06-03T13:45:21.35915197Z" + created: "2024-06-25T06:12:52.821215782Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -399,7 +412,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-06-03T13:45:21.358714849Z" + created: "2024-06-25T06:12:52.820809072Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -411,7 +424,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-06-03T13:45:21.358309608Z" + created: "2024-06-25T06:12:52.820394087Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -423,7 +436,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-06-03T13:45:21.357903745Z" + created: "2024-06-25T06:12:52.819768941Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -435,7 +448,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-06-03T13:45:21.357499837Z" + created: "2024-06-25T06:12:52.818909746Z" description: Perform numpy-like analysis on data that remains in 
someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -447,7 +460,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-06-03T13:45:21.357090377Z" + created: "2024-06-25T06:12:52.818447262Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -459,7 +472,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-06-03T13:45:21.356672332Z" + created: "2024-06-25T06:12:52.818020285Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -471,7 +484,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-06-03T13:45:21.356230252Z" + created: "2024-06-25T06:12:52.817618384Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -483,7 +496,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-06-03T13:45:21.354339461Z" + created: "2024-06-25T06:12:52.816638964Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -495,7 +508,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-06-03T13:45:21.353946513Z" + created: "2024-06-25T06:12:52.816250739Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -507,7 +520,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-06-03T13:45:21.353142151Z" + created: "2024-06-25T06:12:52.815857785Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -519,7 +532,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-06-03T13:45:21.352732211Z" + created: "2024-06-25T06:12:52.815462095Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -531,7 +544,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-06-03T13:45:21.352334795Z" + created: "2024-06-25T06:12:52.815065835Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -543,7 +556,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-06-03T13:45:21.351989927Z" + created: "2024-06-25T06:12:52.81471534Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -555,7 +568,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-06-03T13:45:21.351643917Z" + created: "2024-06-25T06:12:52.814334379Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -567,7 +580,7 @@ entries: version: 
0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-06-03T13:45:21.35129452Z" + created: "2024-06-25T06:12:52.813975658Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -579,7 +592,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-06-03T13:45:21.350902194Z" + created: "2024-06-25T06:12:52.813466557Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -591,7 +604,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-06-03T13:45:21.364116334Z" + created: "2024-06-25T06:12:52.825178305Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -603,7 +616,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-06-03T13:45:21.36376841Z" + created: "2024-06-25T06:12:52.824844672Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -615,7 +628,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-06-03T13:45:21.363407161Z" + created: "2024-06-25T06:12:52.82451759Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -627,7 +640,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-06-03T13:45:21.362698854Z" + created: "2024-06-25T06:12:52.824191702Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -639,7 +652,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-06-03T13:45:21.361807359Z" + created: "2024-06-25T06:12:52.823863939Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -651,7 +664,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-06-03T13:45:21.361471147Z" + created: "2024-06-25T06:12:52.823535867Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -663,7 +676,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-06-03T13:45:21.360307792Z" + created: "2024-06-25T06:12:52.822403291Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -675,7 +688,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-06-03T13:45:21.355072969Z" + created: "2024-06-25T06:12:52.817207747Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -691,7 +704,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-06-03T13:45:21.350539272Z" + created: "2024-06-25T06:12:52.81195297Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -707,7 
+720,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-06-03T13:45:21.349972637Z" + created: "2024-06-25T06:12:52.811331558Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -723,7 +736,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-06-03T13:45:21.349141275Z" + created: "2024-06-25T06:12:52.810681524Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -739,7 +752,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-06-03T13:45:21.347981356Z" + created: "2024-06-25T06:12:52.810082816Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -755,7 +768,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-06-03T13:45:21.347397349Z" + created: "2024-06-25T06:12:52.809518321Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -771,7 +784,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-06-03T13:45:21.346158131Z" + created: "2024-06-25T06:12:52.808868798Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -787,7 +800,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-06-03T13:45:21.345552283Z" + created: "2024-06-25T06:12:52.808285859Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -803,7 +816,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-06-03T13:45:21.344859932Z" + created: "2024-06-25T06:12:52.80772468Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -819,7 +832,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-06-03T13:45:21.344215902Z" + created: "2024-06-25T06:12:52.806393844Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -835,7 +848,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-06-03T13:45:21.343556895Z" + created: "2024-06-25T06:12:52.805157537Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -851,7 +864,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-06-03T13:45:21.342837122Z" + created: "2024-06-25T06:12:52.804509676Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -867,7 +880,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-06-03T13:45:21.341469051Z" + created: "2024-06-25T06:12:52.803884839Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -883,7 +896,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-06-03T13:45:21.340829309Z" + created: "2024-06-25T06:12:52.80325908Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -899,7 +912,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-06-03T13:45:21.339543493Z" + created: "2024-06-25T06:12:52.802625387Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -915,7 +928,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-06-03T13:45:21.338845252Z" + created: "2024-06-25T06:12:52.801959292Z" dependencies: - name: component-chart 
repository: https://charts.devspace.sh @@ -931,7 +944,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-06-03T13:45:21.338148503Z" + created: "2024-06-25T06:12:52.801268201Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -947,7 +960,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-06-03T13:45:21.337497129Z" + created: "2024-06-25T06:12:52.800516166Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -963,7 +976,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-06-03T13:45:21.336820537Z" + created: "2024-06-25T06:12:52.799093034Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -979,7 +992,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-06-03T13:45:21.335899539Z" + created: "2024-06-25T06:12:52.79835199Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -995,7 +1008,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-06-03T13:45:21.334762583Z" + created: "2024-06-25T06:12:52.797813955Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1011,7 +1024,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-06-03T13:45:21.334005641Z" + created: "2024-06-25T06:12:52.797257175Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1027,7 +1040,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-06-03T13:45:21.333450688Z" + created: "2024-06-25T06:12:52.796695726Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1043,7 +1056,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-06-03T13:45:21.332882671Z" + created: "2024-06-25T06:12:52.796129628Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1059,7 +1072,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-06-03T13:45:21.332237018Z" + created: "2024-06-25T06:12:52.795472902Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1075,7 +1088,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-06-03T13:45:21.331568342Z" + created: "2024-06-25T06:12:52.794812107Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1091,7 +1104,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-06-03T13:45:21.330974116Z" + created: "2024-06-25T06:12:52.794225Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1107,7 +1120,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-06-03T13:45:21.330415346Z" + created: "2024-06-25T06:12:52.793281916Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1123,7 +1136,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-06-03T13:45:21.329830537Z" + created: "2024-06-25T06:12:52.792249778Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1139,7 +1152,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: 
"2024-06-03T13:45:21.329215361Z" + created: "2024-06-25T06:12:52.79166731Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1153,4 +1166,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-06-03T13:45:21.328440806Z" +generated: "2024-06-25T06:12:52.790950752Z" diff --git a/packages/grid/helm/repo/syft-0.8.7-beta.11.tgz b/packages/grid/helm/repo/syft-0.8.7-beta.11.tgz new file mode 100644 index 0000000000000000000000000000000000000000..32078a0eae3ab90a7ae91b0dade2e48d0ea33e5a GIT binary patch literal 12295 zcmV+iF!;|OiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PMYMciT9!DB7R>D{z&b+nw2wj0ZuGTC3N+PU6mZCb7M3r+dxW zYcFp+iZUk@sUayR?(Ur5eimO6Nl_2WvYeS?d`K(;C=`G~RiRKQBzInV_~u5&>EKR| zgYO3DM25ByJnWGI8X z#7|HZA|q3Y(i@qg7{<~M6Xb^|iN+9xfFMX9K?%gS5Fgw`V;G{FG@T^F!^4rE-b~d% zN8`h@34|Z~5X|BE-DO6j2fof)u8PL;^Png`O(51$m_nF#*EDc@Xd;~)`s2}I>Zbu* zXHQ4}wSICR=#bsV085)#;{0v~c z1Z8VZ`XR*00YWj1{3MO1)DM~=ckfMSRkr&0t zz%2yPWSlK)^!5y$o?W5K<12Lamy=6$b%uUEdVhJ0PEOJJ#o61BZ>~^@|FzI~mIisz3G5Sm1BIz&`pA`TGBm{h@K z;wVas5jg&m#u7nz>&H=;^%LF7d@v!r9ing=1Vy>?$e=(f2om)FA{mBJnlq*(Z^XRM zbIS7hm!o-+I+)YQ07@VLoknp!#FNNmK*GqtW%<3`?gb>#G@GdR{@728nJ&I0=P@LZ z4pAWE5!5ppFn)Q1k~E6t2vFkxmX#}iUi!bm5b<~Z!F>EGGRQ`1fX;3qj(r0tz3~&| zd+08jqR%o+Q5t0fie%OuuRoTd9Dyk-53(lmzy1byr}>cQ)6_3M7OjUAc#Ku;Rt<#yK+(WQNk{2 z%xRJL%~oN9|9#^k<;zN$l23TK^ov!H0V#gZu`Yh+3#L)~o~*w)zK_Zq-CS#rMGvguO_;TE{;*eom zu(xdcp6LOpACB^V%-MM%Adkn=4~EDOz3BgsCJ>IZeg7beGX=bmT_ze!KNB^HZ(xXS z{L#$;s+hp*Tx^}zq59zm7)SBluTlI7;$(fD1@+M>J5}VKM^P{=^^9D)lKjUaIjth7%MVxQ=yKjdGEOIzf^huBPqOb7 z2hGSWFaOz3Z!!^ue#nYfob!jg8kCg^8CS#MCQ9;Eo=ye{gk}``W)PMdMX{6Oj>vy` zaWo#HFp6Ok+$~IceWxWS{wKIAK+O1e_1C}E7HqY=d|mA1pGre1$#S)tQd%1xvPtV` z=TX^|AC6*3@@{6-JLgRq>QTN=8Ou-Q0OxvmRxINN-NfGy1twkAm$gB|?)X4epO*3ZLcOmTmlcxzIwO zwuANV&GD<5HCzYL)C@*ZGzuU$g3rgh57|sWVFAC1KKIk;Fo>k-D;da8L%*D&7di%+ z&VJ7E%=Uk9UmVMtB}c6F_YoK53v?XnK;8n9X|^lPC#=$94{F~UYTZmmPbTkY0=<_> z^lSx8Z+#ub;X=-1*k?i+Mtf7Tn!Dy9`mg`~cRA5`PWMUq`A-VtNgz{54o}anjxPu4 zmvonHtUmvt7PCd`{D%ojp3i^wk#Z4wbtA)1xdxc&q<)yBG6*t7J(|WSh5%#&xlLA%!Q_Q6ijU3WR}l(8#vvLl;O&F!}~M!5723|2V_+b$dVplf{iPsmM-_0XFTl~;<>_LqKPuLZ8^+H zAuP#VD!5_Ven0ndst+f$gh9u}I~WO+}Dg;49vykyT>g{rj-RV^GQg*Mm~ zK$W(rV3HHp5P~Vw!nGwR*hu?T>6giDF0+Jy%b#}1Y#HQcj7WV38}f-9~)O&J#; zwa}i;xN zJ>qd=D=A&=+AannJz^=wK?!T);jZCnufo$_f~V`)hGJq72Qv=bRfGyh7*BdOmXb=3 z2!{ww9M{rdQK>BDIuwjxgbE99$1`3Bk-C61_Tg3rukSuyyx)QNv0P>;Pf(=@mKsZ2 zVC@L01mi^Tj719zu!|kyTFmA);})|71KZLb;jmp#%B@?s6y1vT*+y$>+b)+%+tLMX zTq6ttn`0qek4Tql&9ycJyN&>tTEtbJ>sZv{o-4Pc@vWAlp&n%xAubi#c7QT0q+=Q4 zU}-RCih-php+;CzN!v2aGE!4&X%~Y-xY=sN-{fYsw{T&-U~PjiBqrjzL|2u4%J z1a@6el32nPAdTRj)i^ zt_5%?CTxM)X*d9os98k8#j%Rb%Hrwg9Z*uEgv1U)kas+Q~WH^Ey z=mNk!?7FUnrLZaUh{ZilJO+d-S4fv&NsRD}B%UnQ<&-juDj*C~#awP|%k_v>QfbaPa3w)2 z!na$`WhW+nB@TAb5yEUDJZ3c5*ps$PElL0z*MO12^9-{&XiSYIu}w8`IKj$tEH0D{ zN@BWIN6usaR;F-tezL=!U~GXVuHcN?%(5_0$F-FOj2j_shB>3uWfmvYvjp>)FxVAxZEd*>D5H*I*fK5^ zObJee@V4qmv6n3u{0=)~8wWhkSlIFiHx?6)BA&;XLjl{ADi^r4nJ}(V%%a9(+5?TT z2h`QJLl4xMI@QC9I0LF#ASB1}|+#@O*YfHI?TU1~y z2_c@VEoBUKRHjP1!Z8l!#whA3rU`bmb}ej(yM+m2EAl0ZdxmCv5UniN_8csjv@OYn zV79S@kQxXW*LGc-Dx$FEYS5mrZI9YE(OX-l&H>_GXrdYeb3HADF^XH1bB7bx*phfc zbM9G0Ys`eQ8KJ_Zj4|O-i7gL6ae8&YW9r!99BYe#3(RAfTFfC#QwGWg z&=kAGaVaxGN$gs#qm5D82*~Kk;%sBOY4&H%(EoD^Q&`*)Z`BvWh@xOTC2V0~q8);{ zV=1Y%p#m(A+ct5CX5a~dv%jT7A=_vI#hzzxXQHT|tL)4q&MB7Qc%H@@bK!Z?p+YO6 zs4XOMy^P;eVA^9EOJ>`))XV}Zh_a2r@?LxUZsxh88L^D=Bz6e%xMZGCfEBZ;<$4}9 zwgBdGX@quxFh_GqDD||+b`Xxy+EVTc{d#pjegQx5&6_9;p{nuV%|8*Y4MJg}U66o{ 
[remainder of the base85 payload omitted: the encoded blob for syft-0.8.7-beta.11.tgz is garbled from extraction and runs into the diff for the new helm/syft/templates/NOTES.txt, of which only the fragment "... Resources and increase CPUs and Memory." is recoverable]
diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index c7925370965..0c37af8ba93 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = syft -version = attr: "0.8.7-beta.10" +version = attr: "0.8.7-beta.11" description = Perform numpy-like analysis on data that remains in someone elses server author = OpenMined author_email = info@openmined.org diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION index 61a3b991302..c06d53659ea 100644 --- a/packages/syft/src/syft/VERSION +++ b/packages/syft/src/syft/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.7-beta.10" +__version__ = "0.8.7-beta.11" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index 994de159775..eb2fd2df975 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.8.7-beta.10" +__version__ = "0.8.7-beta.11" # stdlib from collections.abc import Callable diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml index a9e200f513d..afc84a1c936 100644 --- a/packages/syftcli/manifest.yml +++ b/packages/syftcli/manifest.yml @@ -1,11 +1,11 @@ manifestVersion: 1.0 -syftVersion: 0.8.7-beta.10 -dockerTag: 0.8.7-beta.10 +syftVersion: 0.8.7-beta.11 +dockerTag: 0.8.7-beta.11 images: - - docker.io/openmined/grid-frontend:0.8.7-beta.10 - - docker.io/openmined/grid-backend:0.8.7-beta.10 + - docker.io/openmined/grid-frontend:0.8.7-beta.11 + - docker.io/openmined/grid-backend:0.8.7-beta.11 - docker.io/library/mongo:7.0.4 - docker.io/traefik:v2.11.0 From a2bd3e28e609aad294f85f288cf9e62f61d3a61f Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Tue, 25 Jun 2024 07:00:49 +0000 Subject: [PATCH 289/313] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- .../backend/grid/images/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 185 ++++++++++-------- .../grid/helm/repo/syft-0.8.7-beta.12.tgz | Bin 0 -> 12296 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/values.yaml | 2 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- packages/syftcli/manifest.yml | 8 +- 14 files changed, 115 insertions(+), 102 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.8.7-beta.12.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9c44c997441..ef70d88f6ae 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.7-beta.11 +current_version = 0.8.7-beta.12 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index c06d53659ea..99dffe7cb16 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.7-beta.11" +__version__ = "0.8.7-beta.12" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index c06d53659ea..99dffe7cb16 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.7-beta.11" +__version__ = "0.8.7-beta.12" # elsewhere we can call this file: `python VERSION` and simply take the stdout # 
stdlib diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile index e51577f7317..c566cb841b2 100644 --- a/packages/grid/backend/grid/images/worker_cpu.dockerfile +++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile @@ -5,7 +5,7 @@ # NOTE: This dockerfile will be built inside a grid-backend container in PROD # Hence COPY will not work the same way in DEV vs. PROD -ARG SYFT_VERSION_TAG="0.8.7-beta.11" +ARG SYFT_VERSION_TAG="0.8.7-beta.12" FROM openmined/grid-backend:${SYFT_VERSION_TAG} # should match base image python version diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index e946605d9bb..650e3120ed9 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -27,7 +27,7 @@ vars: DOCKER_IMAGE_SEAWEEDFS: openmined/grid-seaweedfs DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/grid-enclave-attestation CONTAINER_REGISTRY: "docker.io" - VERSION: "0.8.7-beta.11" + VERSION: "0.8.7-beta.12" PLATFORM: $(uname -m | grep -q 'arm64' && echo "arm64" || echo "amd64") # This is a list of `images` that DevSpace can build for this project diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index bfba367268b..586d677afb3 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.7-beta.11", + "version": "0.8.7-beta.12", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 09fe1ae3588..5501c3be72c 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.7-beta.12 + created: "2024-06-25T06:53:34.402656615Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: e92b2f3a522dabb3a79ff762a7042ae16d2bf3a53eebbb2885a69b9f834d109c + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.7-beta.12.tgz + version: 0.8.7-beta.12 - apiVersion: v2 appVersion: 0.8.7-beta.11 - created: "2024-06-25T06:12:52.839343993Z" + created: "2024-06-25T06:53:34.401975147Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 099f6cbd44b699ee2410a4be012ed1a8a65bcacb06a43057b2779d7fe34fc0ad @@ -16,7 +29,7 @@ entries: version: 0.8.7-beta.11 - apiVersion: v2 appVersion: 0.8.7-beta.10 - created: "2024-06-25T06:12:52.838636311Z" + created: "2024-06-25T06:53:34.401295772Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 00773cb241522e281c1915339fc362e047650e08958a736e93d6539f44cb5e25 @@ -29,7 +42,7 @@ entries: version: 0.8.7-beta.10 - apiVersion: v2 appVersion: 0.8.7-beta.9 - created: "2024-06-25T06:12:52.845105768Z" + created: "2024-06-25T06:53:34.408352854Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a3f8e85d9ddef7a644b959fcc2fcb0fc08f7b6abae1045e893d0d62fa4ae132e @@ -42,7 +55,7 @@ entries: version: 0.8.7-beta.9 - apiVersion: v2 appVersion: 0.8.7-beta.8 - created: "2024-06-25T06:12:52.844461064Z" + created: "2024-06-25T06:53:34.407714717Z" description: Perform numpy-like analysis on data that remains in someone elses server 
digest: a422ac88d8fd1fb80d5004d5eb6e95fa9efc7f6a87da12e5ac04829da7f04c4d @@ -55,7 +68,7 @@ entries: version: 0.8.7-beta.8 - apiVersion: v2 appVersion: 0.8.7-beta.7 - created: "2024-06-25T06:12:52.843798366Z" + created: "2024-06-25T06:53:34.40700263Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0dc313a1092e6256a7c8aad002c8ec380b3add2c289d680db1e238a336399b7a @@ -68,7 +81,7 @@ entries: version: 0.8.7-beta.7 - apiVersion: v2 appVersion: 0.8.7-beta.6 - created: "2024-06-25T06:12:52.843137972Z" + created: "2024-06-25T06:53:34.406389289Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 052a2ec1102d2a4c9915f95647abd4a6012f56fa05a106f4952ee9b55bf7bae8 @@ -81,7 +94,7 @@ entries: version: 0.8.7-beta.6 - apiVersion: v2 appVersion: 0.8.7-beta.5 - created: "2024-06-25T06:12:52.841960362Z" + created: "2024-06-25T06:53:34.405770749Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1728af756907c3fcbe87c2fd2de014a2d963c22a4c2eb6af6596b525a9b9a18a @@ -94,7 +107,7 @@ entries: version: 0.8.7-beta.5 - apiVersion: v2 appVersion: 0.8.7-beta.4 - created: "2024-06-25T06:12:52.84114054Z" + created: "2024-06-25T06:53:34.405139654Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 387a57a3904a05ed61e92ee48605ef6fd5044ff7e822e0924e0d4c485e2c88d2 @@ -107,7 +120,7 @@ entries: version: 0.8.7-beta.4 - apiVersion: v2 appVersion: 0.8.7-beta.3 - created: "2024-06-25T06:12:52.840532705Z" + created: "2024-06-25T06:53:34.403876083Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 3668002b7a4118516b2ecd61d6275f60d83fc12841587ab8f62e1c1200731c67 @@ -120,7 +133,7 @@ entries: version: 0.8.7-beta.3 - apiVersion: v2 appVersion: 0.8.7-beta.2 - created: "2024-06-25T06:12:52.839921723Z" + created: "2024-06-25T06:53:34.403232787Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e62217ffcadee2b8896ab0543f9ccc42f2df898fd979438ac9376d780b802af7 @@ -133,7 +146,7 @@ entries: version: 0.8.7-beta.2 - apiVersion: v2 appVersion: 0.8.7-beta.1 - created: "2024-06-25T06:12:52.837943086Z" + created: "2024-06-25T06:53:34.40063933Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 553981fe1d5c980e6903b3ff2f1b9b97431f6dd8aee91e3976bcc5594285235e @@ -146,7 +159,7 @@ entries: version: 0.8.7-beta.1 - apiVersion: v2 appVersion: 0.8.6 - created: "2024-06-25T06:12:52.837431521Z" + created: "2024-06-25T06:53:34.400141236Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ddbbe6fea1702e57404875eb3019a3b1a341017bdbb5fbc6ce418507e5c15756 @@ -159,7 +172,7 @@ entries: version: 0.8.6 - apiVersion: v2 appVersion: 0.8.6-beta.1 - created: "2024-06-25T06:12:52.836903905Z" + created: "2024-06-25T06:53:34.399615239Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc2c81ef6796ac853dce256e6bf8a6af966c21803e6534ea21920af681c62e61 @@ -172,7 +185,7 @@ entries: version: 0.8.6-beta.1 - apiVersion: v2 appVersion: 0.8.5 - created: "2024-06-25T06:12:52.836369456Z" + created: "2024-06-25T06:53:34.399046622Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: db5d90d44006209fd5ecdebd88f5fd56c70f7c76898343719a0ff8da46da948a @@ -185,7 +198,7 @@ entries: version: 0.8.5 - apiVersion: v2 appVersion: 0.8.5-post.2 - created: 
"2024-06-25T06:12:52.835618063Z" + created: "2024-06-25T06:53:34.398297737Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ea3f7269b55f773fa165d7008c054b7cf3ec4c62eb40a96f08cd3a9b77fd2165 @@ -198,7 +211,7 @@ entries: version: 0.8.5-post.2 - apiVersion: v2 appVersion: 0.8.5-post.1 - created: "2024-06-25T06:12:52.835052767Z" + created: "2024-06-25T06:53:34.39775036Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9deb844d3dc2d8480c60f8c631dcc7794adfb39cec3aa3b1ce22ea26fdf87d02 @@ -211,7 +224,7 @@ entries: version: 0.8.5-post.1 - apiVersion: v2 appVersion: 0.8.5-beta.10 - created: "2024-06-25T06:12:52.827481057Z" + created: "2024-06-25T06:53:34.390273423Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49 @@ -224,7 +237,7 @@ entries: version: 0.8.5-beta.10 - apiVersion: v2 appVersion: 0.8.5-beta.9 - created: "2024-06-25T06:12:52.833586212Z" + created: "2024-06-25T06:53:34.396319949Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b @@ -237,7 +250,7 @@ entries: version: 0.8.5-beta.9 - apiVersion: v2 appVersion: 0.8.5-beta.8 - created: "2024-06-25T06:12:52.832848044Z" + created: "2024-06-25T06:53:34.395578407Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 @@ -250,7 +263,7 @@ entries: version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-06-25T06:12:52.83210726Z" + created: "2024-06-25T06:53:34.394805848Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -263,7 +276,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-06-25T06:12:52.831280094Z" + created: "2024-06-25T06:53:34.394062353Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -276,7 +289,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-06-25T06:12:52.8305338Z" + created: "2024-06-25T06:53:34.393327654Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -289,7 +302,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-06-25T06:12:52.829762529Z" + created: "2024-06-25T06:53:34.392595029Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -302,7 +315,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-06-25T06:12:52.829015674Z" + created: "2024-06-25T06:53:34.391850402Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -315,7 +328,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-06-25T06:12:52.828233382Z" + created: "2024-06-25T06:53:34.391057434Z" description: Perform numpy-like analysis on data that remains in someone elses 
server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -328,7 +341,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-06-25T06:12:52.826458486Z" + created: "2024-06-25T06:53:34.3887868Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -340,7 +353,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-06-25T06:12:52.82557704Z" + created: "2024-06-25T06:53:34.388419701Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -352,7 +365,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-06-25T06:12:52.823206712Z" + created: "2024-06-25T06:53:34.386042961Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -364,7 +377,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-06-25T06:12:52.822811543Z" + created: "2024-06-25T06:53:34.385651898Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -376,7 +389,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-06-25T06:12:52.822038149Z" + created: "2024-06-25T06:53:34.384915345Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -388,7 +401,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-06-25T06:12:52.821634985Z" + created: "2024-06-25T06:53:34.38451279Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -400,7 +413,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-06-25T06:12:52.821215782Z" + created: "2024-06-25T06:53:34.384105926Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -412,7 +425,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-06-25T06:12:52.820809072Z" + created: "2024-06-25T06:53:34.383688974Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -424,7 +437,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-06-25T06:12:52.820394087Z" + created: "2024-06-25T06:53:34.382888916Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -436,7 +449,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-06-25T06:12:52.819768941Z" + created: "2024-06-25T06:53:34.382124342Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -448,7 +461,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 
appVersion: 0.8.4-beta.23 - created: "2024-06-25T06:12:52.818909746Z" + created: "2024-06-25T06:53:34.381731875Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -460,7 +473,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-06-25T06:12:52.818447262Z" + created: "2024-06-25T06:53:34.381342294Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -472,7 +485,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-06-25T06:12:52.818020285Z" + created: "2024-06-25T06:53:34.380945099Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -484,7 +497,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-06-25T06:12:52.817618384Z" + created: "2024-06-25T06:53:34.380541652Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -496,7 +509,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-06-25T06:12:52.816638964Z" + created: "2024-06-25T06:53:34.379580107Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -508,7 +521,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-06-25T06:12:52.816250739Z" + created: "2024-06-25T06:53:34.379153818Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -520,7 +533,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-06-25T06:12:52.815857785Z" + created: "2024-06-25T06:53:34.378762103Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -532,7 +545,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-06-25T06:12:52.815462095Z" + created: "2024-06-25T06:53:34.378366711Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -544,7 +557,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-06-25T06:12:52.815065835Z" + created: "2024-06-25T06:53:34.377970237Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -556,7 +569,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-06-25T06:12:52.81471534Z" + created: "2024-06-25T06:53:34.377619408Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -568,7 +581,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-06-25T06:12:52.814334379Z" + created: "2024-06-25T06:53:34.377235889Z" description: Perform 
numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -580,7 +593,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-06-25T06:12:52.813975658Z" + created: "2024-06-25T06:53:34.376259338Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -592,7 +605,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-06-25T06:12:52.813466557Z" + created: "2024-06-25T06:53:34.375661466Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -604,7 +617,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-06-25T06:12:52.825178305Z" + created: "2024-06-25T06:53:34.388018558Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -616,7 +629,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-06-25T06:12:52.824844672Z" + created: "2024-06-25T06:53:34.38769439Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -628,7 +641,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-06-25T06:12:52.82451759Z" + created: "2024-06-25T06:53:34.38736388Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -640,7 +653,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-06-25T06:12:52.824191702Z" + created: "2024-06-25T06:53:34.387012901Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -652,7 +665,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-06-25T06:12:52.823863939Z" + created: "2024-06-25T06:53:34.386694855Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -664,7 +677,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-06-25T06:12:52.823535867Z" + created: "2024-06-25T06:53:34.386371027Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -676,7 +689,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-06-25T06:12:52.822403291Z" + created: "2024-06-25T06:53:34.385254662Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -688,7 +701,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-06-25T06:12:52.817207747Z" + created: "2024-06-25T06:53:34.380137053Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -704,7 +717,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - 
created: "2024-06-25T06:12:52.81195297Z" + created: "2024-06-25T06:53:34.375192687Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -720,7 +733,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-06-25T06:12:52.811331558Z" + created: "2024-06-25T06:53:34.374621725Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -736,7 +749,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-06-25T06:12:52.810681524Z" + created: "2024-06-25T06:53:34.373981855Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -752,7 +765,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-06-25T06:12:52.810082816Z" + created: "2024-06-25T06:53:34.373383522Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -768,7 +781,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-06-25T06:12:52.809518321Z" + created: "2024-06-25T06:53:34.372818762Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -784,7 +797,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-06-25T06:12:52.808868798Z" + created: "2024-06-25T06:53:34.372173722Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -800,7 +813,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-06-25T06:12:52.808285859Z" + created: "2024-06-25T06:53:34.371631885Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -816,7 +829,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-06-25T06:12:52.80772468Z" + created: "2024-06-25T06:53:34.371036718Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -832,7 +845,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-06-25T06:12:52.806393844Z" + created: "2024-06-25T06:53:34.370119312Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -848,7 +861,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-06-25T06:12:52.805157537Z" + created: "2024-06-25T06:53:34.36878117Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -864,7 +877,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-06-25T06:12:52.804509676Z" + created: "2024-06-25T06:53:34.368166107Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -880,7 +893,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-06-25T06:12:52.803884839Z" + created: "2024-06-25T06:53:34.367545843Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -896,7 +909,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-06-25T06:12:52.80325908Z" + created: "2024-06-25T06:53:34.366874793Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -912,7 +925,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-06-25T06:12:52.802625387Z" + created: "2024-06-25T06:53:34.366251454Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -928,7 +941,7 @@ entries: version: 0.8.2-beta.52 - 
apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-06-25T06:12:52.801959292Z" + created: "2024-06-25T06:53:34.365625409Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -944,7 +957,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-06-25T06:12:52.801268201Z" + created: "2024-06-25T06:53:34.364985248Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -960,7 +973,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-06-25T06:12:52.800516166Z" + created: "2024-06-25T06:53:34.364262682Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -976,7 +989,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-06-25T06:12:52.799093034Z" + created: "2024-06-25T06:53:34.363203034Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -992,7 +1005,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-06-25T06:12:52.79835199Z" + created: "2024-06-25T06:53:34.362267008Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1008,7 +1021,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-06-25T06:12:52.797813955Z" + created: "2024-06-25T06:53:34.361714361Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1024,7 +1037,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-06-25T06:12:52.797257175Z" + created: "2024-06-25T06:53:34.361168797Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1040,7 +1053,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-06-25T06:12:52.796695726Z" + created: "2024-06-25T06:53:34.36062142Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1056,7 +1069,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-06-25T06:12:52.796129628Z" + created: "2024-06-25T06:53:34.360065867Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1072,7 +1085,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-06-25T06:12:52.795472902Z" + created: "2024-06-25T06:53:34.359420647Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1088,7 +1101,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-06-25T06:12:52.794812107Z" + created: "2024-06-25T06:53:34.358722316Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1104,7 +1117,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-06-25T06:12:52.794225Z" + created: "2024-06-25T06:53:34.358149962Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1120,7 +1133,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-06-25T06:12:52.793281916Z" + created: "2024-06-25T06:53:34.357590262Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1136,7 +1149,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-06-25T06:12:52.792249778Z" + created: "2024-06-25T06:53:34.356996818Z" dependencies: - name: 
component-chart repository: https://charts.devspace.sh @@ -1152,7 +1165,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-06-25T06:12:52.79166731Z" + created: "2024-06-25T06:53:34.355655254Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1166,4 +1179,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-06-25T06:12:52.790950752Z" +generated: "2024-06-25T06:53:34.354929102Z" diff --git a/packages/grid/helm/repo/syft-0.8.7-beta.12.tgz b/packages/grid/helm/repo/syft-0.8.7-beta.12.tgz new file mode 100644 index 0000000000000000000000000000000000000000..9bb5f0888ab517e5cf47fc41fb857960bacd6c42 GIT binary patch literal 12296 zcmV+jF!#?NiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PMYMciT9!DB7R>D{z&b+nw2wj0ZuGTC3N+PU6mZCb7M3r+dxW zYcFp+iZUk@sUayR?(Ur5eimO6Nl_2WvYeS?d`K(;C=`G~RiRKQBzInV_~u5&>EKR| zgYO$T(JeiYlgaES9ykNhSCGm9p$7&M9s83s&pLRlA#Rl z5T1nN^fL}Vi-$5OpqUI=#NK-sh4y*}2MEP5@{=^a8zK|wPY@6M=m4Q5ODAva=P1^I40v)Hq$mL$Ly8JQL|zml z1Gf-FlX14J(c3e0dUk~_kFU_xUrsL3)fxKv=>6p}Iypt>7iVujzPUO%JI(Nrp^3)+ zZ(uHSh{+J8@e~dYUZ9_LoemDhQ8tsh5W)=zXR^TCAhc8J1h5ESLkBZC5|AV|>vi)0u^Y0j9Eyb<$0 z&ne62UykNQ>R?VI11Nz2bQ;C^5KkhL0SO}mm*w|%yBCl|(`=&N`(r;XX1e&2oX3zr zIz)ktM^Mjb!1(12O42BnBS4A&TUM_8dFlTKL&V?t2lMf($RHc30Xn;dIQ9*o^u|w+ z@1eVBiayIQMQM}`D3V!sy#82*as;NVJjj~J|N0x;o#sQHPgB46ShQA>-JzZ)X*8~y zjpgT(0%tI?$wm2`oy#Qo9L1*U-J=jxwbq9BCXfv#Y7EAoj=N^+QTj6D6y*YuNeb}+ zLgW08VqjmO%c2zeB?_ivKp9nJ%EE`&D1kK7C5qw$bd?i9g2E`xsA`bczs+l9l|H;4 zB7!k4!IrhsXdGmm1{p}DA0$ZMfc}K?785N!FA5Cm7i;#XafKl&S8;ueJwM3v^8YL3 z^B?zXq=M7c#wE+>j?HQAV*>FINi+7tYL!gG6dj$PWaF6qeKheaWaKoB@^94#g!(9m zv<$A*RDa4SAcZ9D7e8iYGIAJ4DO81O83+~H&`2kZuSLo0f}eH6L`cVAk|8>TR38>= zDLEYJa}?#e~gM+v*A zF{ee|H(P}b{`ZZGlrJk~NcKNuoE^rHVinm{&J^%McA01_{Y=y(zJVdS z@kciYsA2-IbFp<=hw6tPU>wDFzee#Vh?61mWRPT2_s=Oz;XGIBIp_8PLP0copKVf# z_H(FeIvPQIT#-ywS0+iM{d~eM;Ga`S(xYh_9Ze>2bSr~-xoH9?VIku#W*e$v|B?;I z+j6s1;D#nq6x2tj>{O9^9!0^h)H8DFO7b6zZBy~#ur`XMV`an2v|YEV`xWLyo0n<&Xwc{&*+5Smf!n?YD=6va-8J0kz( z#nE_(!YGDGaJMk&^_`ZS_@Cge05Rj=)nETsTd>vg@^!J3e<}^7B+J!mN@;C)$R@3$ zokwL;emIID$-9|R@0>Sfs7LueWh_6H1DxyOS+R^8bQ8y~HxQy}k_}%t^20BvY#<-v zI0^!Y^R*RCQ{7lJBpE{9id=Ah8abGU;N?%>l@K^#H~$-s{erwR09 z7{Qkz>SZjaw?Um4KF59v{WK%LbchbWr{A+p+L|P3407DRE!KVS`%VZ%lcazrvyc6nHMl>LDSVc9Tek7*W!yuBTuVf%Y4gGS8Ug#KP zI{P`tGu!{ceQ_*nmK?Fx-$z`KFVJzQ19=NbrrEAEpRh`cJ*a(asC6?LJ(;|p3G`kj z(X$mWz4dhzhYLB6VV?7tugTgr=b#ek&BG?+|ZEFru?`BwX&G#sMu z(G3J+lrigR(ghXu!-dbuMCvTB4P^u=l0nb~CNEkv`}0h5%|%spaFrk#0|Gz2ffz|+ zz#PUfj&8vWih952|MoJy{sW}a$W-=*<<~riH-6@yO4z+2;;))HH!`{D4N*^E;tFL6 z%8fE0F}E=ohdB;dhGHdb2RjPDbsUe`3~TPW#0GFY%Mmv5oL&pA)M?RRPa4yk&V+CrZJ4%Q1w1RpY)@h0dRS;`ljS`r7DBBv^O8Ml6{^-URJCxJ6xv`{ z09D$ef=NzXLkOl!3)hyQlnYD{tch_LqZ)%{yS8mC33$^%fAzrC=^4khnWGdlwlUJh zE_X4PQVB5BahQh{Glo-(yBLE5#3BIN5iTW~Y8M_%96MN|)Ns!RkD-MWwQo>rgO)5h^Ua9nW|jMCt<4*oRvgyuSN*@qP#5$8wpaJVBKrSZXY7 zfwd!~5{whUGZrl@z%F)(YcZSKj9bhS3~Wn#gu`|{DYtIjQgkcUXB(}lZM$45ZA%xl zag8tpY>tI=JtAGIHP_k@>^cHmY7tj?u47S)d#>D)#?naA{kZKC@hT8L=(e}@LXbX%(i2ae~r{h3gagFTgf6@3ay2v zN?FEGW@*B)qpAQ(*< z6WDb@Nn#0GfHZ=8S`$S)No}4nPRFtd7{H2y5ZsX%97Bj9f{QIjd@IJ{aLWVNcwA|v zxE8>b(3UNf)D~shxQ=GjbD6OnVS6BjazNP{JD$y5+ia)bzR9g~#hN`C%MrY}k>Ln- zpbG%^u&|6?3_Tmwc5&oj*CpfNR;#5UE$;RGwovA9q+ zD2eG-9XXHvTbaVq`Nqmsy-p&l1dI!f*>) zOnO{U+XH*Mo=jEX>)m#QbH`P-G6q{5z||Ia9bqI0$1_|KLM_g)w6*0jpo}_-VavEw zFeNwGZErQTPh_4@jR_u$^nF{jS-wmOF0a4M@mZx;wkW`bUlDA zEe^Zy25{yGmRqTSDN6_-jswaio^9J0YYH~ElwuT^k_n6c6HBu2jquk~tD{MZg11m=b%mmPliRM;xt< 
zBRmbpwq1`o)G|~^Wl<)j$0-+6VFm?qfK+O@DD?iMD9t;m-s?irfxLA0`5+jFpB(zYZQ zg4xCrLTVsjT-$YRs))jtt3i9hwmoXwL~m`GItPe%p^0h?%=NSo#wc!4&K*u%V@u)* z&ADe0tuYhIW`qitGRA~UCAK^O$;Fm(_GrO3t4p)qj;UjZbF3`}E-;T_YB7f}O&KT~ zKvV1z$EC~&C9!L{jy6VVBOs$Ei?faCrrDo4L;ueyOkr_Hyj5QeBZ`9Yl(2<`iFOF) zj-{m5h6=DeZrj8mnt>+-&i0h<6nmb%or$7;uCgG<9Qlu%!TJkhYGEP zqPCF4^)h}>foYFvESYWFQZoyvAj&od%X{tZyP4;XX2deelh`55gs&iBH4dhS2+}zt!)Mb zYp#S6wqnWwO^hUl8qerV3#M9w>)4n{i#VVuB{n6F*lroO`|&0UlT_|x%kP3ii7|#b zOxc#8p399iRB;=W1Wy`Dx#wA~c5UUj%F@8N(XOj4N|hAbDeWI*s&63KgC}vy6GoDM8VU~0RcA0U7O^pW*5M~3%j-&ME$IH!y5yQ`Z5bOYbN;Pwo zBaQ7?*&c#91T&k0tu4o))G(kfafOXNn#~6_!~pZCG|aVtk?n=T+2;_<4mzN?R?4!B zu&5_FQLZ#zHWNy5+qj;`wREVY+;h0?NXjvHZK8~eZQ~m3Y}bJg{ueMO;bfW?OJWx` zVsI?5v`c^qM-Tx-S^~4&0waJa0=8tp1biV^~Sx9&udbc$Pz4?NJ9?f~+_!??V6JYvDeZ-{~HM z43pd+u(};o#E<#x2LV!0+z^(juk(w)3eeae#WDqh(kO?L-hdyY+5ER~7!E!ra+F`f{QdVn@Fp#l2(_2AEkU%wt?1r2z9 z*jcV$mYBqTn0l!9r=Cmq zY{8|vqvR}Z&b)9nES=?ZkL^&G(A#d#i{;BpeSLM;tWn+g{`&Rc27>V*xjD>7AuqL# z3R=UzFgV49{P#N=qDScRj_Iem=UMf5QzB~Dd)$$#clxP$(R12=YRmr_aqmzCSR?;A z$L;$+obc!Pk9$cupF5AtjH{u*ANwhq zVJTh}0mxpJm*8I^TK!>_*W#Bk+C}6mE#mvKT{Qd&W&JroWmk&smD`zKTx`FRS+DhE z+Cx2p@wkV2<2;T;FSjOs@O!8SlN%UAEQ5X-2Be329Dne8FDrog(6_$Hg1-VkByJ_v8# z$R^}#H=(i32)N|XzK3pP@;A7f^Xfq%SE>S`fiLaMKTsO|U5j`VQ+eTadCDr zM8&oIB9hIYk{^&C-par?19TJw(PuDKuu>TV%J;H)6F>eV@m{-k0 zD3zabsj9(1s9|tW#uQ3W8X*`@(z~iX8D=^`K?j4H8c+*Q(6K3;%hb9yP|4RyADHih zifg#rz?^;ENcz64HA?9Atu}k|5G-g*E1E$V-v5=MI(KRSUbFwRxZrL3uT7rgfA1yb^5%UMeVR@Ro{|SxLo%G7 z#+0GI(61-&-{)btGf`w)$bgRW0Af(o9IV+aR-`qmtZ*drz5Lx*{*f^M@jI*$;D$`1uCnq7Rv%_mezCBbdPf%0J9N z_Ll?AE7k+mW|d21GuB=H)u?|i<4g#Yr3NdEfweK4v#sxosT%%-dX3LT@wYOSS>BI9 zF|WN_(nGz=a;OJYql@7)%I01U@&mYBAt|TQ%k_ae{2wd*tM+a13GQB&_%smxsnW+< zg=@IaRG?XlW!*Zs`SC%onZ00qZW?kkg-TZR9mPnNG3*8h{Y-6Y#2EIIG!3Yt~~XU^iNjO=R=Hl-0-gd0RbXTlIB{iiL{M$0DF$DpOz2 zV*VHX`Z13eSt4)zBoE8=a#twN2C@39pSQZ$nSA6saQ~*6OrsB|k6uSnkO8TyfEjsj zM3UK(EBk07$OEnCOQ6GNX}zp$w{~{>XQ-+3cbpD-qJ(!n%IAwD@DqSGjZ@=2evqkS}sFUrO$Wy$rT){R`UO{|oE+{lERBn$XPE%}ilwPJXX&E9qntd9dz9F7f;C4Vd}5 zdwe!g1pGgn$bU}ZphBoKDtL#vx1q|-4?|{}SFJxqMq!=k0AF~!ww2JqFNKx9cyDd6 z@J?A#$+U}yk%1DoL3bc^)G&-3Aap;yV(t3RB|%^6{277m7{NOKPtlJ5N(rU-dHwGr zt&;yo#iXxL16t&v zcpc}w{;`6SKPt8s((k`F_XD%1$ma3x>uszpUU_BI3$@-ZtBO{L=GhITxGkKH#%rG! 
zJ5<>|zYWvpbJ~Fa&n5o$GGI0Tr-V_vwg1DG{mlROlIr|FQ~T!n)nz&Lm#g+vdldvR zU5bA zeG-`u`3 zScjSwEPd#f6kdW(O3MK|@`rjFaWtqv-3`!`TCyhojgx{Sw}AvC{4Q zf@-UY^Jx%NBz+WomUl_5!~(iqT$BhE0IBNbmj>f}Q211?>fhl)-oq6Xny&29HeQt_c z+I-gZ<@wQ@M{Ym&8S6yi&DrVI(aGuY#r0p#F0Z<<7^2=MC+W3@I8#PC0iK?{J-)vB z`*~-N7B$||*?;B(Q9J_NLw}%uPNNi>Q{8H1*}N^bl2xj;@?-68dzRm8UbcO?QR|l{ z_v&ck8(0VT?eWh?AKzbHU!0v?T_1lqI(fet?AiOub@z#0E6Y!2d25HafynvM<>jwu z7jHKw(b+_4Vw;$geUo2U-)#I>>^-oSe9Dg{w z`1|#*XBU4vzPQ{Bz1EA{n^IBH#`XEx+57904@d8oteD=;#anmZyU==b_VIMXIa+wT zdypFdw;7B~_`E-Qef)mI@n3AZu9v0JUz0o<$5wb8ot~au9bKKAojwGariQi|MjtPa zuTM^|jxSD+-d|rFzdO0Sy7>EMICQ+izO1!wOG@i)`E`T#`s16w9ber;@GQ2l0=YZ{J;&n(`VfVq=_=rtEi%jtaK+`WL+wJ ziMvr=Hcut2Wn)=@UF0{_80X6P_2u#D+hxjDXS4YQt+L4lPId|MGl2Q|vWw$~(6LYI zl~3CDmQeyLH1>?x)?%3y&3nlpVfi-UV*>G7S}R*P|8<89r-Ni_$Sq|%%gU^0Jr$9$ zrTc=V;^!)sx;U3!9Gt6*B^&u6>gD$tx`o8$@x?DEZ&qz*D?gU*`hPt-ez$u69zL#remTBAy1F{PER@pKT1<^{x|(^o z&IRFZSFtWTWma^#wQ5PLwS&v{V91Hz&$3%RpP32In@5=WsO0h6K4jaH1)$^XYhfZf z%vSxOk*Pe?F_`4A)kM*LmIYzz%OL4zWm|<}kmf85?uNycomOr#RYhAXpd_h`)9K_q zjubSpy-Cx_J4o9+V^Ux*C;s6rJ**5}|KB77@KYUHyIww0o#W|AKbXdFbrVB!69s07 z7;Y4pKnIN2SEjBdy!z<;e9H#pWU>tSO5)mt6Sr&Jn<1}ap+(0(K^#KRPe4y&KfP;p zz0PKnyX3Gaeb`yLf|yRmeFM2W&vIY}ZV%JRI3CwIPzAQUBH2WE68pD)03$HN{C??% zVrM>r{kacKm0HrrvN+!XN8gx*ufXkTIQix*drQn+^*#8#WLwQ&0M}ZGCee> z<%5oKT(wkIE|Zl|7uL+el201$o~LU(=kw`+catdjzc1v^H@UXh!UPxYI!W^uZY$& zY;cD>T3EL9KKRexWgDku{$Eo+Fc9zM{tsrBwQ&E(vY!3F_K~){|IXKt_}wbs8EXUbk^W`FbzOoHfc4BIaL z_3NKej@$m9g45^qzmK%s|MTr^;yV~XMIMj3;%AL?JAgi!TW78FNtgR66nCM%0hi4h zrk!edmA|pyW=DlBeK+fnw({Rx2H=4{oHMZZ_2cZWxI#iUNHYac$@cN8Og1-{@m9XKfSjteruD$4agw9+0-P`h1lcR6V`5gpIT1- zyQa1J-wtkouG{}{yS@Lz^f~_1e$qYfe?E=g_cS*^Yklpw0y^FP2Iy1Ic|En(eL>B9YQ`n>LkMo^N`S#jO^6>LyCk&R%C3UEb8$CcFZh?YxtY(O%NG zesyHpINfZH(y~$L>QhJ0=6DU!VdM8zu4tdf;Q3z;x<>WB*ew_!cZ=XQD7CM#-yfqN zU;Zd3{>vW+e}4H*B4;9<{1JnR^v>~{xkqn~A!BR-M-@csaM=K!j3+@J2O(!{mlM#w z=j%2zS;D*0AZf0tG^j1&*&c57TAsUuFJ#vIz|rR$2+`I1%j{#>=E5b|6==32q*gxd zOL=T}rE*l->~=%>-_rl`TQ|kOyNUh%-A3`hN!$Ko6Z={J+ef;m{ijJ8kE-1Qf_QLyLwrxM#fA^BQ;(ynt|C^8g zU97s!*x&IZ1^td6E82Jc&g)>|7VCrJd@oBY*Z&S;e}1>k`418J`TS=;X&d>!N3s7j zCqGp$@1C3>-qd=FKOHOkNai>|8lV( zs{v|6eq7i-{VT+Mth2lYgGLqdWHa}{cIhpB2WMcmavbce{Xnn50_J^P20Lo)&|k11 zwiV8Td4ZvL3J<@c=PTU%S_K|(VYP^8b8>oe_4f5VlErqRbK2o)W?A=%gSUv#ziGIg z&NZ^xRoi*lonEqS+cLW!@yRX}U zbJ4PwyMc@Ap~$)Pe`_4!J+)G{$JEmQ^V7uQ-~Q|+orhWg*7<)>Y%je3_pJZ#C#^e_ z?z9iQ$tmN5oOEmq>a_Ey`-9@(d-l%XT`0%C9^8$CA^KzC5WdvHYiwb5`Ju)rf=HUz zGLWH$xXG}y`VA$kVoTFF7@}U@)Zxr&dLGoM<8<|*jVy$1nw`n+vXwbdhcQ+)7Dv;x zQMYRp<7ogx)XOAcan}6f{~Z3$%U(lQOXIuq$nTKQz0aU}9cR&~x6$BswJj(9|GY$h z{DB%@O{9~K&-t)B)9m;gS@7#1PbaqLgypdHz( zS)d;Ks}t%%>3W0NCU}7i^)yMNaVf()OCa=@+!?1ypWRJn1l>>rVt+Ep$G9EKB>R?a zY4VY8AsRaPpfXbW z;i$WWLyI!?67v>@sF!8*$}(|u>(}SU^Me{;XTJ)8a0sbBEXVAy#-^Wrh`)ujLjT*r z&F>41;{ehSNOF#;$pVqG%iZjFm$*m)wR3(UrvACmH%IrTKvC? 
z7Bu@~AMrnIi?;87u;=?9`$;V}T21={L_h`NQKi7Fk*DRtwZ6_6aer|fy7+K|3pZX+ z$9Ruic>Vf+xEJU;|4*AQ#(!n_dHwGr-E;ll$3^sM7(v?-So6NDA@nccKw880sqKXg zR)!W$GBc{#)N0sKVN1hd9nw~|!({*-Xg-{Qy{`qayW$Rwh%?TTK07@kM>)oj^9{=bj3 z>C2$^_5*Fx&!g@F=3!|nBwC_){;cO%CMm>&0;vLc`6-R&4qY!${%~pTz5Va5V9YKPt2Q zG>yiycV$-KKPy=8LjkGFp1-`=j%`@ctBw}xJ)1vg_0$V?$7uup|KKJK% zHzLtWU$Ga}Y4?AWhsXa>7DfvZ_BulJb^4Och06v(`Zim3i%0AA9?Va@ksqS(|8n-> z_;6nNa78CFzE>|gKd^71J&PW=w8T=+itgT*@oJiVpP$ulFeV+&5|1+|H5ZI!qq4ln z{)eT@nV~)~o&E|dMk4v_C5KIKh-JgDveeU!L{mO43`j8;G6+gZouD*Ahw(H#EPo&7 zC03Hm>cXfYy^4bU+o(N4>pQ=3-|GzTUIM#O@m`X9yPM}rbn+%;bY4zhN>S_gcIzpl zzkbGwkL~z%osVSQr{yj(Uo*{(8Gca)Dys&1v%PBtXp^1mecfj}<>9{kXPZ0zeLK+B zbo6B&w9i)Lr75@n4sIYA`=c<5;W64+cmB^=`~3%M5%O&R-A8(X&SjcH9Oe?apn>9H zLpAjSyO z7y9MRB{F`T91KQ&dYJ!Ppnot>zr~07ztxAE(P8#a^)tB*59ffC)Ssr4T$xP{{ya!N zPY(V(Q1a8kp9kqU`#XyL(ZQep_rVMFi;VqfnxK=n$H~E95=Z|4ogNH)1M;va9!LK; i7~Cd0GI03JZ=Tb0dQRVR`ab~x0RR866siCKJ^=vm3yJ3d literal 0 HcmV?d00001 diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml index b905393e86f..249199ea2a6 100644 --- a/packages/grid/helm/syft/Chart.yaml +++ b/packages/grid/helm/syft/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: syft description: Perform numpy-like analysis on data that remains in someone elses server type: application -version: "0.8.7-beta.11" -appVersion: "0.8.7-beta.11" +version: "0.8.7-beta.12" +appVersion: "0.8.7-beta.12" home: https://github.com/OpenMined/PySyft/ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index f15ff32371c..c94120c9f82 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -1,7 +1,7 @@ global: # Affects only backend, frontend, and seaweedfs containers registry: docker.io - version: 0.8.7-beta.11 + version: 0.8.7-beta.12 # Force default secret values for development. 
DO NOT SET THIS TO FALSE IN PRODUCTION randomizedSecrets: true diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index 0c37af8ba93..1b431954aef 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = syft -version = attr: "0.8.7-beta.11" +version = attr: "0.8.7-beta.12" description = Perform numpy-like analysis on data that remains in someone elses server author = OpenMined author_email = info@openmined.org diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION index c06d53659ea..99dffe7cb16 100644 --- a/packages/syft/src/syft/VERSION +++ b/packages/syft/src/syft/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.7-beta.11" +__version__ = "0.8.7-beta.12" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index eb2fd2df975..816fef5d4a1 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.8.7-beta.11" +__version__ = "0.8.7-beta.12" # stdlib from collections.abc import Callable diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml index afc84a1c936..fb2c7cff98c 100644 --- a/packages/syftcli/manifest.yml +++ b/packages/syftcli/manifest.yml @@ -1,11 +1,11 @@ manifestVersion: 1.0 -syftVersion: 0.8.7-beta.11 -dockerTag: 0.8.7-beta.11 +syftVersion: 0.8.7-beta.12 +dockerTag: 0.8.7-beta.12 images: - - docker.io/openmined/grid-frontend:0.8.7-beta.11 - - docker.io/openmined/grid-backend:0.8.7-beta.11 + - docker.io/openmined/grid-frontend:0.8.7-beta.12 + - docker.io/openmined/grid-backend:0.8.7-beta.12 - docker.io/library/mongo:7.0.4 - docker.io/traefik:v2.11.0 From e15ffe496f80ee20f83d2c4cbdfa9d5326725bac Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 12:31:35 +0200 Subject: [PATCH 290/313] cache Request.code --- packages/syft/src/syft/service/request/request.py | 2 ++ packages/syft/src/syft/store/linked_obj.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 011e96c25d7..cc4293bd85b 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -1212,6 +1212,8 @@ class UserCodeStatusChange(Change): @property def code(self) -> UserCode: + if self.linked_user_code._cache: + return self.linked_user_code._cache return self.linked_user_code.resolve def get_user_code(self, context: AuthedServiceContext) -> UserCode: diff --git a/packages/syft/src/syft/store/linked_obj.py b/packages/syft/src/syft/store/linked_obj.py index 93f63d1f8b4..d6c2c121a1e 100644 --- a/packages/syft/src/syft/store/linked_obj.py +++ b/packages/syft/src/syft/store/linked_obj.py @@ -26,6 +26,8 @@ class LinkedObject(SyftObject): object_type: type[SyftObject] object_uid: UID + _cache = None + __exclude_sync_diff_attrs__ = ["node_uid"] def __str__(self) -> str: @@ -46,7 +48,8 @@ def resolve(self) -> SyftObject: if api is None: raise ValueError(f"api is None. 
You must login to {self.node_uid}") - return api.services.notifications.resolve_object(self) + self._cache = api.services.notifications.resolve_object(self) + return self._cache def resolve_with_context(self, context: NodeServiceContext) -> Any: if context.node is None: From af009b4831b6db97850008fdaed4eec7bb8ff5d9 Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Tue, 25 Jun 2024 17:01:26 +0530 Subject: [PATCH 291/313] remove old enclave clode - iteration 1 --- notebooks/api/0.8/05-custom-policy.ipynb | 20 +- .../Enclave-single-notebook-DO-DS.ipynb | 735 ------------------ .../syft/src/syft/client/enclave_client.py | 52 -- .../syft/service/enclave/enclave_service.py | 149 ---- .../syft/src/syft/service/policy/policy.py | 13 - 5 files changed, 1 insertion(+), 968 deletions(-) delete mode 100644 notebooks/tutorials/enclaves/Enclave-single-notebook-DO-DS.ipynb diff --git a/notebooks/api/0.8/05-custom-policy.ipynb b/notebooks/api/0.8/05-custom-policy.ipynb index c87f5f84036..663468260b4 100644 --- a/notebooks/api/0.8/05-custom-policy.ipynb +++ b/notebooks/api/0.8/05-custom-policy.ipynb @@ -286,7 +286,7 @@ " root_context = AuthedServiceContext(\n", " node=context.node, credentials=context.node.verify_key\n", " )\n", - " if context.node.node_type == NodeType.DOMAIN:\n", + " if context.node.node_type in NodeType.DOMAIN:\n", " for var_name, arg_id in allowed_inputs.items():\n", " kwarg_value = action_service._get(\n", " context=root_context,\n", @@ -297,14 +297,6 @@ " if kwarg_value.is_err():\n", " return Err(kwarg_value.err())\n", " code_inputs[var_name] = kwarg_value.ok()\n", - "\n", - " elif context.node.node_type == NodeType.ENCLAVE:\n", - " dict_object = action_service.get(context=root_context, uid=code_item_id)\n", - " if dict_object.is_err():\n", - " return Err(dict_object.err())\n", - " for value in dict_object.ok().syft_action_data.values():\n", - " code_inputs.update(value)\n", - "\n", " else:\n", " raise Exception(\n", " f\"Invalid Node Type for Code Submission:{context.node.node_type}\"\n", @@ -328,11 +320,6 @@ " verify_key=context.node.signing_key.verify_key,\n", " )\n", " allowed_inputs = allowed_inputs.get(node_identity, {})\n", - " elif context.node.node_type == NodeType.ENCLAVE:\n", - " base_dict = {}\n", - " for key in allowed_inputs.values():\n", - " base_dict.update(key)\n", - " allowed_inputs = base_dict\n", " else:\n", " raise Exception(\n", " f\"Invalid Node Type for Code Submission:{context.node.node_type}\"\n", @@ -403,11 +390,6 @@ " verify_key=context.node.signing_key.verify_key,\n", " )\n", " allowed_inputs = allowed_inputs.get(node_identity, {})\n", - " elif context.node.node_type == NodeType.ENCLAVE:\n", - " base_dict = {}\n", - " for key in allowed_inputs.values():\n", - " base_dict.update(key)\n", - " allowed_inputs = base_dict\n", " else:\n", " raise Exception(\n", " f\"Invalid Node Type for Code Submission:{context.node.node_type}\"\n", diff --git a/notebooks/tutorials/enclaves/Enclave-single-notebook-DO-DS.ipynb b/notebooks/tutorials/enclaves/Enclave-single-notebook-DO-DS.ipynb deleted file mode 100644 index 62d7fc48c99..00000000000 --- a/notebooks/tutorials/enclaves/Enclave-single-notebook-DO-DS.ipynb +++ /dev/null @@ -1,735 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "0", - "metadata": {}, - "outputs": [], - "source": [ - "# third party\n", - "from recordlinkage.datasets import load_febrl4\n", - "\n", - "# syft absolute\n", - "import syft as sy" - ] - }, - { - "cell_type": 
"markdown", - "id": "1", - "metadata": {}, - "source": [ - "# Create Nodes and connect to gateway" - ] - }, - { - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "create enclave node" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "# Local Python Node\n", - "enclave_node = sy.orchestra.launch(\n", - " name=\"Enclave\",\n", - " node_type=sy.NodeType.ENCLAVE,\n", - " local_db=True,\n", - " dev_mode=True,\n", - " reset=True,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "# syft absolute\n", - "from syft.abstract_node import NodeType\n", - "\n", - "assert enclave_node.python_node.node_type == NodeType.ENCLAVE" - ] - }, - { - "cell_type": "markdown", - "id": "5", - "metadata": {}, - "source": [ - "Create canada node & italy node" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "ca_node = sy.orchestra.launch(name=\"Canada\", local_db=True, reset=True, dev_mode=True)\n", - "it_node = sy.orchestra.launch(name=\"Italy\", local_db=True, reset=True, dev_mode=True)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "assert ca_node.python_node.node_type == NodeType.DOMAIN\n", - "assert it_node.python_node.node_type == NodeType.DOMAIN" - ] - }, - { - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "Create gateway Node" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "gateway_node = sy.orchestra.launch(\n", - " name=\"gateway\",\n", - " node_type=sy.NodeType.GATEWAY,\n", - " local_db=True,\n", - " reset=True,\n", - " dev_mode=True,\n", - " association_request_auto_approval=True,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "10", - "metadata": {}, - "source": [ - "Connect nodes to gateway" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "enclave_guest_client = enclave_node.client\n", - "ca_guest_client = ca_node.client\n", - "it_guest_client = it_node.client" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "12", - "metadata": {}, - "outputs": [], - "source": [ - "# syft absolute\n", - "from syft.client.domain_client import DomainClient\n", - "from syft.client.enclave_client import EnclaveClient\n", - "from syft.client.gateway_client import GatewayClient\n", - "\n", - "assert isinstance(enclave_guest_client, EnclaveClient)\n", - "assert isinstance(ca_guest_client, DomainClient)\n", - "assert isinstance(it_guest_client, DomainClient)\n", - "assert isinstance(gateway_node.client, GatewayClient)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "13", - "metadata": {}, - "outputs": [], - "source": [ - "# syft absolute\n", - "# Connect enclave to gateway\n", - "from syft.service.response import SyftSuccess\n", - "\n", - "res = enclave_guest_client.connect_to_gateway(handle=gateway_node)\n", - "assert isinstance(res, SyftSuccess)\n", - "res" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "14", - "metadata": {}, - "outputs": [], - "source": [ - "# Connect Canada to gateway\n", - "res = ca_guest_client.connect_to_gateway(handle=gateway_node)\n", - "assert isinstance(res, SyftSuccess)\n", - "res" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "id": "15", - "metadata": {}, - "outputs": [], - "source": [ - "# Connect Italy to gateway\n", - "res = it_guest_client.connect_to_gateway(handle=gateway_node)\n", - "assert isinstance(res, SyftSuccess)\n", - "res" - ] - }, - { - "cell_type": "markdown", - "id": "16", - "metadata": {}, - "source": [ - "# DOs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "17", - "metadata": {}, - "outputs": [], - "source": [ - "do_ca_client = ca_node.login(email=\"info@openmined.org\", password=\"changethis\")\n", - "do_it_client = it_node.login(email=\"info@openmined.org\", password=\"changethis\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "18", - "metadata": {}, - "outputs": [], - "source": [ - "# syft absolute\n", - "from syft.client.domain_client import DomainClient\n", - "\n", - "assert isinstance(do_ca_client, DomainClient)\n", - "assert isinstance(do_it_client, DomainClient)" - ] - }, - { - "cell_type": "markdown", - "id": "19", - "metadata": {}, - "source": [ - "## Upload dataset" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", - "metadata": {}, - "outputs": [], - "source": [ - "# Using public datasets from Freely Extensible Biomedical Record Linkage (Febrl)\n", - "canada_census_data, italy_census_data = load_febrl4()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "21", - "metadata": {}, - "outputs": [], - "source": [ - "for dataset, client, country in zip(\n", - " [canada_census_data, italy_census_data],\n", - " [do_ca_client, do_it_client],\n", - " [\"Canada\", \"Italy\"],\n", - "):\n", - " private_data, mock_data = dataset[:2500], dataset[2500:]\n", - " dataset = sy.Dataset(\n", - " name=f\"{country} - FEBrl Census Data\",\n", - " description=\"abc\",\n", - " asset_list=[\n", - " sy.Asset(\n", - " name=\"census_data\",\n", - " mock=mock_data,\n", - " data=private_data,\n", - " shape=private_data.shape,\n", - " mock_is_real=True,\n", - " )\n", - " ],\n", - " )\n", - " client.upload_dataset(dataset)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "22", - "metadata": {}, - "outputs": [], - "source": [ - "assert len(do_ca_client.datasets.get_all()) == 1\n", - "assert len(do_it_client.datasets.get_all()) == 1" - ] - }, - { - "cell_type": "markdown", - "id": "23", - "metadata": {}, - "source": [ - "## create accounts for DS" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24", - "metadata": {}, - "outputs": [], - "source": [ - "for client in [do_ca_client, do_it_client]:\n", - " res = client.register(\n", - " name=\"Sheldon\",\n", - " email=\"sheldon@caltech.edu\",\n", - " password=\"changethis\",\n", - " password_verify=\"changethis\",\n", - " )\n", - " assert isinstance(res, SyftSuccess)" - ] - }, - { - "cell_type": "markdown", - "id": "25", - "metadata": {}, - "source": [ - "# DS" - ] - }, - { - "cell_type": "markdown", - "id": "26", - "metadata": {}, - "source": [ - "## Login into gateway as guest" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "27", - "metadata": {}, - "outputs": [], - "source": [ - "ds_gateway_client = gateway_node.client" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "28", - "metadata": {}, - "outputs": [], - "source": [ - "# Explore the domains and enclaves connected to the gateway\n", - "ds_gateway_client.domains" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "29", - "metadata": {}, - "outputs": [], 
- "source": [ - "# Log into canada as proxy_client\n", - "ds_ca_proxy_client = ds_gateway_client.domains[0]\n", - "ds_ca_proxy_client = ds_ca_proxy_client.login(\n", - " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", - ")\n", - "assert ds_ca_proxy_client.name == \"Canada\"\n", - "assert ds_ca_proxy_client.connection.proxy_target_uid == do_ca_client.id\n", - "assert isinstance(ds_ca_proxy_client, DomainClient)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30", - "metadata": {}, - "outputs": [], - "source": [ - "# Log into italy as proxy_client\n", - "ds_it_proxy_client = ds_gateway_client.domains[1]\n", - "ds_it_proxy_client = ds_it_proxy_client.login(\n", - " email=\"sheldon@caltech.edu\", password=\"changethis\"\n", - ")\n", - "assert ds_it_proxy_client.name == \"Italy\"\n", - "assert ds_it_proxy_client.connection.proxy_target_uid == do_it_client.id\n", - "assert isinstance(ds_it_proxy_client, DomainClient)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "31", - "metadata": {}, - "outputs": [], - "source": [ - "# Create an account and log into enclave as proxy client\n", - "ds_enclave_proxy_client = ds_gateway_client.enclaves[0]\n", - "ds_enclave_proxy_client = ds_enclave_proxy_client.login(\n", - " email=\"sheldon@caltech.edu\", password=\"changethis\", name=\"Sheldon\", register=True\n", - ")\n", - "assert ds_enclave_proxy_client.name == \"Enclave\"\n", - "assert ds_enclave_proxy_client.connection.proxy_target_uid == enclave_guest_client.id\n", - "assert isinstance(ds_enclave_proxy_client, EnclaveClient)" - ] - }, - { - "cell_type": "markdown", - "id": "32", - "metadata": {}, - "source": [ - "## Find datasets" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "33", - "metadata": {}, - "outputs": [], - "source": [ - "canada_census_data = ds_ca_proxy_client.datasets[-1].assets[0]\n", - "italy_census_data = ds_it_proxy_client.datasets[-1].assets[0]" - ] - }, - { - "cell_type": "markdown", - "id": "34", - "metadata": {}, - "source": [ - "## Create Request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "35", - "metadata": {}, - "outputs": [], - "source": [ - "@sy.syft_function_single_use(\n", - " canada_census_data=canada_census_data,\n", - " italy_census_data=italy_census_data,\n", - " share_results_with_owners=True,\n", - ")\n", - "def compute_census_matches(canada_census_data, italy_census_data):\n", - " # third party\n", - " import recordlinkage\n", - "\n", - " # Index step\n", - " indexer = recordlinkage.Index()\n", - " indexer.block(\"given_name\")\n", - "\n", - " candidate_links = indexer.index(canada_census_data, italy_census_data)\n", - "\n", - " # Comparison step\n", - " compare_cl = recordlinkage.Compare()\n", - "\n", - " compare_cl.exact(\"given_name\", \"given_name\", label=\"given_name\")\n", - " compare_cl.string(\n", - " \"surname\", \"surname\", method=\"jarowinkler\", threshold=0.85, label=\"surname\"\n", - " )\n", - " compare_cl.exact(\"date_of_birth\", \"date_of_birth\", label=\"date_of_birth\")\n", - " compare_cl.exact(\"suburb\", \"suburb\", label=\"suburb\")\n", - " compare_cl.exact(\"state\", \"state\", label=\"state\")\n", - " compare_cl.string(\"address_1\", \"address_1\", threshold=0.85, label=\"address_1\")\n", - "\n", - " features = compare_cl.compute(\n", - " candidate_links, canada_census_data, italy_census_data\n", - " )\n", - "\n", - " # Classification step\n", - " matches = features[features.sum(axis=1) > 3]\n", - "\n", - " return len(matches)" - ] - 
}, - { - "cell_type": "code", - "execution_count": null, - "id": "36", - "metadata": {}, - "outputs": [], - "source": [ - "# Check result of mock data execution\n", - "mock_result = compute_census_matches(\n", - " canada_census_data=canada_census_data.mock,\n", - " italy_census_data=italy_census_data.mock,\n", - ")\n", - "mock_result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "37", - "metadata": {}, - "outputs": [], - "source": [ - "req = ds_enclave_proxy_client.request_code_execution(compute_census_matches)\n", - "req" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "38", - "metadata": {}, - "outputs": [], - "source": [ - "assert isinstance(req, sy.service.request.request.Request)" - ] - }, - { - "cell_type": "markdown", - "id": "39", - "metadata": {}, - "source": [ - "# DOs" - ] - }, - { - "cell_type": "markdown", - "id": "40", - "metadata": {}, - "source": [ - "## Approve" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "41", - "metadata": {}, - "outputs": [], - "source": [ - "for client in [do_ca_client, do_it_client]:\n", - " res = client.requests[-1].approve()\n", - " assert isinstance(res, SyftSuccess)" - ] - }, - { - "cell_type": "markdown", - "id": "42", - "metadata": {}, - "source": [ - "# DS" - ] - }, - { - "cell_type": "markdown", - "id": "43", - "metadata": {}, - "source": [ - "## Get result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "44", - "metadata": {}, - "outputs": [], - "source": [ - "status = ds_enclave_proxy_client.code.get_all()[-1].status\n", - "status" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "45", - "metadata": {}, - "outputs": [], - "source": [ - "for st, _ in status.status_dict.values():\n", - " assert st == sy.service.request.request.UserCodeStatus.APPROVED" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "46", - "metadata": {}, - "outputs": [], - "source": [ - "ds_enclave_proxy_client.code[-1].output_policy" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "47", - "metadata": {}, - "outputs": [], - "source": [ - "result_pointer = ds_enclave_proxy_client.code.compute_census_matches(\n", - " canada_census_data=canada_census_data, italy_census_data=italy_census_data\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "48", - "metadata": {}, - "outputs": [], - "source": [ - "result_pointer" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "49", - "metadata": {}, - "outputs": [], - "source": [ - "result_pointer.syft_action_data == 858" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "50", - "metadata": {}, - "outputs": [], - "source": [ - "real_result = result_pointer.get()\n", - "real_result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "51", - "metadata": {}, - "outputs": [], - "source": [ - "assert real_result == 813" - ] - }, - { - "cell_type": "markdown", - "id": "52", - "metadata": {}, - "source": [ - "# DO" - ] - }, - { - "cell_type": "markdown", - "id": "53", - "metadata": {}, - "source": [ - "## Can also get the result" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "54", - "metadata": {}, - "outputs": [], - "source": [ - "request = do_ca_client.requests[0]\n", - "request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "55", - "metadata": {}, - "outputs": [], - "source": [ - "result_ptr = request.get_results()\n", - "result_ptr" - ] - 
}, - { - "cell_type": "code", - "execution_count": null, - "id": "56", - "metadata": {}, - "outputs": [], - "source": [ - "assert result_ptr.syft_action_data == 813" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "57", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "toc": { - "base_numbering": 1, - "nav_menu": {}, - "number_sections": true, - "sideBar": true, - "skip_h1_title": false, - "title_cell": "Table of Contents", - "title_sidebar": "Contents", - "toc_cell": false, - "toc_position": { - "height": "calc(100% - 180px)", - "left": "10px", - "top": "150px", - "width": "358.398px" - }, - "toc_section_display": true, - "toc_window_display": true - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/packages/syft/src/syft/client/enclave_client.py b/packages/syft/src/syft/client/enclave_client.py index 32eebdf3189..6252817f630 100644 --- a/packages/syft/src/syft/client/enclave_client.py +++ b/packages/syft/src/syft/client/enclave_client.py @@ -2,12 +2,10 @@ from __future__ import annotations # stdlib -from typing import Any from typing import TYPE_CHECKING # relative from ..abstract_node import NodeSideType -from ..client.api import APIRegistry from ..serde.serializable import serializable from ..service.metadata.node_metadata import NodeMetadataJSON from ..service.network.routes import NodeRouteType @@ -15,7 +13,6 @@ from ..service.response import SyftSuccess from ..types.syft_object import SYFT_OBJECT_VERSION_3 from ..types.syft_object import SyftObject -from ..types.uid import UID from ..util.assets import load_png_base64 from ..util.notebook_ui.styles import FONT_CSS from .api import APIModule @@ -27,7 +24,6 @@ if TYPE_CHECKING: # relative from ..orchestra import NodeHandle - from ..service.code.user_code import SubmitUserCode @serializable() @@ -109,54 +105,6 @@ def connect_to_gateway( def get_enclave_metadata(self) -> EnclaveMetadata: return EnclaveMetadata(route=self.connection.route) - def request_code_execution(self, code: SubmitUserCode) -> Any | SyftError: - # relative - from ..service.code.user_code_service import SubmitUserCode - - if not isinstance(code, SubmitUserCode): - raise Exception( - f"The input code should be of type: {SubmitUserCode} got:{type(code)}" - ) - if code.input_policy_init_kwargs is None: - raise ValueError(f"code {code}'s input_policy_init_kwargs is None") - - enclave_metadata = self.get_enclave_metadata() - - code_id = UID() - code.id = code_id - code.enclave_metadata = enclave_metadata - - apis = [] - for k, v in code.input_policy_init_kwargs.items(): - # We would need the verify key of the data scientist to be able to index the correct client - # Since we do not want the data scientist to pass in the clients to the enclave client - # from a UX perspecitve. - # we will use the recent node id to find the correct client - # assuming that it is the correct client - # Warning: This could lead to inconsistent results, when we have multiple clients - # in the same node pointing to the same node. 
- # One way, by which we could solve this in the long term, - # by forcing the user to pass only assets to the sy.ExactMatch, - # by which we could extract the verify key of the data scientist - # as each object comes with a verify key and node_uid - # the asset object would contain the verify key of the data scientist. - api = APIRegistry.get_by_recent_node_uid(k.node_id) - if api is None: - raise ValueError(f"could not find client for input {v}") - else: - apis += [api] - - for api in apis: - res = api.services.code.request_code_execution(code=code) - if isinstance(res, SyftError): - return res - - # we are using the real method here, see the .code property getter - _ = self.code - res = self._request_code_execution(code=code) - - return res - def _repr_html_(self) -> str: commands = """
  • <your_client> diff --git a/packages/syft/src/syft/service/enclave/enclave_service.py b/packages/syft/src/syft/service/enclave/enclave_service.py index 46afbb6af8c..0807ada8da0 100644 --- a/packages/syft/src/syft/service/enclave/enclave_service.py +++ b/packages/syft/src/syft/service/enclave/enclave_service.py @@ -1,163 +1,14 @@ # stdlib # relative -from ...client.enclave_client import EnclaveClient -from ...client.enclave_client import EnclaveMetadata from ...serde.serializable import serializable -from ...service.response import SyftError -from ...service.response import SyftSuccess -from ...service.user.user_roles import GUEST_ROLE_LEVEL from ...store.document_store import DocumentStore -from ...types.twin_object import TwinObject -from ...types.uid import UID -from ..action.action_object import ActionObject -from ..code.user_code import UserCode -from ..code.user_code import UserCodeStatus -from ..context import AuthedServiceContext -from ..context import ChangeContext -from ..network.routes import route_to_connection -from ..policy.policy import InputPolicy from ..service import AbstractService -from ..service import service_method -# TODO 🟣 Created a generic Enclave Service -# Currently it mainly works only for Azure @serializable() class EnclaveService(AbstractService): store: DocumentStore def __init__(self, store: DocumentStore) -> None: self.store = store - - @service_method( - path="enclave.send_user_code_inputs_to_enclave", - name="send_user_code_inputs_to_enclave", - roles=GUEST_ROLE_LEVEL, - ) - def send_user_code_inputs_to_enclave( - self, - context: AuthedServiceContext, - user_code_id: UID, - inputs: dict, - node_name: str, - node_id: UID, - ) -> SyftSuccess | SyftError: - if not context.node or not context.node.signing_key: - return SyftError(message=f"{type(context)} has no node") - - root_context = AuthedServiceContext( - credentials=context.node.verify_key, node=context.node - ) - - user_code_service = context.node.get_service("usercodeservice") - action_service = context.node.get_service("actionservice") - user_code = user_code_service.get_by_uid(context=root_context, uid=user_code_id) - if isinstance(user_code, SyftError): - return user_code - - reason: str = context.extra_kwargs.get("reason", "") - status_update = user_code.get_status(root_context).mutate( - value=(UserCodeStatus.APPROVED, reason), - node_name=node_name, - node_id=node_id, - verify_key=context.credentials, - ) - if isinstance(status_update, SyftError): - return status_update - - res = user_code.status_link.update_with_context(root_context, status_update) - if isinstance(res, SyftError): - return res - - root_context = context.as_root_context() - if not action_service.exists(context=context, obj_id=user_code_id): - dict_object = ActionObject.from_obj({}) - dict_object.id = user_code_id - dict_object[str(context.credentials)] = inputs - root_context.extra_kwargs = {"has_result_read_permission": True} - # TODO: Instead of using the action store, modify to - # use the action service directly to store objects - # TODO: we store this in the actionstore isntead of blob stoarge, - # which is bad, but we cannot update in the blobstorage - res = action_service._set( - root_context, - dict_object, - ignore_detached_objs=True, - skip_clear_cache=True, - ) - if res.is_err(): - return SyftError(message=res.value) - - else: - res = action_service.get(uid=user_code_id, context=root_context) - if res.is_ok(): - dict_object = res.ok() - dict_object[str(context.credentials)] = inputs - # TODO: we store this in the 
actionstore instead of blob storage, - # which is bad, but we cannot update in the blobstorage - res = action_service._set( - root_context, - dict_object, - ignore_detached_objs=True, - skip_clear_cache=True, - ) - if res.is_err(): - return SyftError(message=res.value) - else: - return SyftError( - message=f"Error while fetching the object on Enclave: {res.err()}" - ) - - return SyftSuccess(message="Enclave Code Status Updated Successfully") - - -# Checks if the given user code would propagate value to enclave on acceptance -def propagate_inputs_to_enclave( - user_code: UserCode, context: ChangeContext -) -> SyftSuccess | SyftError: - if isinstance(user_code.enclave_metadata, EnclaveMetadata): - # TODO 🟣 Restructure url to work for local mode host.docker.internal - - connection = route_to_connection(user_code.enclave_metadata.route) - enclave_client = EnclaveClient( - connection=connection, - credentials=context.node.signing_key, - ) - - send_method = ( - enclave_client.api.services.enclave.send_user_code_inputs_to_enclave - ) - - else: - return SyftSuccess(message="Current Request does not require Enclave Transfer") - - input_policy: InputPolicy | None = user_code.get_input_policy( - context.to_service_ctx() - ) - if input_policy is None: - return SyftError(message=f"{user_code}'s input policy is None") - inputs = input_policy._inputs_for_context(context) - if isinstance(inputs, SyftError): - return inputs - - # Save inputs to blob store - for var_name, var_value in inputs.items(): - if isinstance(var_value, ActionObject | TwinObject): - # Set the obj location to enclave - var_value._set_obj_location_( - enclave_client.api.node_uid, - enclave_client.verify_key, - ) - var_value._save_to_blob_storage() - - inputs[var_name] = var_value - - # send data of the current node to enclave - res = send_method( - user_code_id=user_code.id, - inputs=inputs, - node_name=context.node.name, - node_id=context.node.id, - ) - return res diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index 0e4c791d3ea..fa97790ba39 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -594,14 +594,6 @@ def retrieve_from_db( if kwarg_value.is_err(): return Err(kwarg_value.err()) code_inputs[var_name] = kwarg_value.ok() - - elif context.node.node_type == NodeType.ENCLAVE: - dict_object = action_service.get(context=root_context, uid=code_item_id) - if dict_object.is_err(): - return Err(dict_object.err()) - for value in dict_object.ok().syft_action_data.values(): - code_inputs.update(value) - else: raise Exception( f"Invalid Node Type for Code Submission:{context.node.node_type}" @@ -621,11 +613,6 @@ def allowed_ids_only( verify_key=context.node.signing_key.verify_key, ) allowed_inputs = allowed_inputs.get(node_identity, {}) - elif context.node.node_type == NodeType.ENCLAVE: - base_dict = {} - for key in allowed_inputs.values(): - base_dict.update(key) - allowed_inputs = base_dict else: raise Exception( f"Invalid Node Type for Code Submission:{context.node.node_type}" From 60ce86e453d1e1509089b5d26c0f4bc9a46b6e75 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 13:38:23 +0200 Subject: [PATCH 292/313] cache get_data_protocol --- packages/syft/src/syft/protocol/data_protocol.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index f357f867467..176bf989167 100644 ---
a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -3,6 +3,7 @@ from collections.abc import Iterable from collections.abc import MutableMapping from collections.abc import MutableSequence +from functools import cache import hashlib import json from operator import itemgetter @@ -528,6 +529,7 @@ def reset_dev_protocol(self) -> None: self.save_history(self.protocol_history) +@cache def get_data_protocol(raise_exception: bool = False) -> DataProtocol: return DataProtocol( filename=data_protocol_file_name(), From 869c2ac84f703de5c3cb57f60b0f6e61a2e9bbd1 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 13:39:56 +0200 Subject: [PATCH 293/313] cache has_output_read_permissions --- packages/syft/src/syft/service/code/user_code.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index b71f5aa4cc6..34c0dc70acd 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -332,6 +332,8 @@ class UserCode(SyncableSyftObject): origin_node_side_type: NodeSideType l0_deny_reason: str | None = None + _has_output_read_permissions_cache: bool | None = None + __table_coll_widths__ = [ "min-content", "auto", @@ -439,9 +441,14 @@ def _compute_status_l0( if isinstance(api, SyftError): return api node_identity = NodeIdentity.from_api(api) - is_approved = api.output.has_output_read_permissions( - self.id, self.user_verify_key - ) + + if self._has_output_read_permissions_cache is None: + is_approved = api.output.has_output_read_permissions( + self.id, self.user_verify_key + ) + self._has_output_read_permissions_cache = is_approved + else: + is_approved = self._has_output_read_permissions_cache else: # Serverside node_identity = NodeIdentity.from_node(context.node) From 36ea12d3ecfe0ec1ad950a40a69206f8380ebcd2 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 13:49:39 +0200 Subject: [PATCH 294/313] rename resolve cache --- packages/syft/src/syft/protocol/data_protocol.py | 12 ++++++++++-- packages/syft/src/syft/service/request/request.py | 4 ++-- packages/syft/src/syft/store/linked_obj.py | 6 +++--- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 176bf989167..8b007ee3edf 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -529,14 +529,22 @@ def reset_dev_protocol(self) -> None: self.save_history(self.protocol_history) -@cache +# @cache def get_data_protocol(raise_exception: bool = False) -> DataProtocol: - return DataProtocol( + return _get_data_protocol( filename=data_protocol_file_name(), raise_exception=raise_exception, ) +@cache +def _get_data_protocol(filename: str, raise_exception: bool = False) -> DataProtocol: + return DataProtocol( + filename=filename, + raise_exception=raise_exception, + ) + + def stage_protocol_changes() -> Result[SyftSuccess, SyftError]: data_protocol = get_data_protocol(raise_exception=True) return data_protocol.stage_protocol_changes() diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index cc4293bd85b..ac7fe6b607f 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ 
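Patch 292 above puts a plain functools.cache on get_data_protocol; the follow-up hunks below then push the decorator down onto an inner _get_data_protocol keyed by filename. The likely motivation is the usual functools.cache pitfall: the key is built from the call arguments, so caching the outer function would pin whatever filename data_protocol_file_name() returned first. A minimal sketch of that shape, with a hypothetical _load_protocol_state standing in for the real DataProtocol:

    # stdlib
    from functools import cache

    @cache
    def _load_protocol_state(filename: str, raise_exception: bool = False) -> dict:
        # Memoized per (filename, raise_exception); every argument must be hashable.
        return {"filename": filename, "raise_exception": raise_exception}

    def get_protocol_state(raise_exception: bool = False) -> dict:
        # The outer function stays uncached, so the filename is re-resolved on
        # every call (e.g. when tests point it at a temporary protocol file).
        return _load_protocol_state(_current_filename(), raise_exception)

    def _current_filename() -> str:
        return "protocol_version.json"  # placeholder for data_protocol_file_name()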
-1212,8 +1212,8 @@ class UserCodeStatusChange(Change): @property def code(self) -> UserCode: - if self.linked_user_code._cache: - return self.linked_user_code._cache + if self.linked_user_code._resolve_cache: + return self.linked_user_code._resolve_cache return self.linked_user_code.resolve def get_user_code(self, context: AuthedServiceContext) -> UserCode: diff --git a/packages/syft/src/syft/store/linked_obj.py b/packages/syft/src/syft/store/linked_obj.py index d6c2c121a1e..6079e2634d0 100644 --- a/packages/syft/src/syft/store/linked_obj.py +++ b/packages/syft/src/syft/store/linked_obj.py @@ -26,7 +26,7 @@ class LinkedObject(SyftObject): object_type: type[SyftObject] object_uid: UID - _cache = None + _resolve_cache: SyftObject | None = None __exclude_sync_diff_attrs__ = ["node_uid"] @@ -48,8 +48,8 @@ def resolve(self) -> SyftObject: if api is None: raise ValueError(f"api is None. You must login to {self.node_uid}") - self._cache = api.services.notifications.resolve_object(self) - return self._cache + self._resolve_cache = api.services.notifications.resolve_object(self) + return self._resolve_cache def resolve_with_context(self, context: NodeServiceContext) -> Any: if context.node is None: From c317005233a1009759f52763439867064eb72b65 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 13:51:38 +0200 Subject: [PATCH 295/313] Update packages/syft/src/syft/protocol/data_protocol.py --- packages/syft/src/syft/protocol/data_protocol.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 8b007ee3edf..170c103d7a8 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -529,7 +529,6 @@ def reset_dev_protocol(self) -> None: self.save_history(self.protocol_history) -# @cache def get_data_protocol(raise_exception: bool = False) -> DataProtocol: return _get_data_protocol( filename=data_protocol_file_name(), From 072d20a2686005f60fec62b53c8805de7534a5c8 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 13:54:24 +0200 Subject: [PATCH 296/313] fix lint --- packages/syft/src/syft/store/linked_obj.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/store/linked_obj.py b/packages/syft/src/syft/store/linked_obj.py index 6079e2634d0..3dd1ec9295d 100644 --- a/packages/syft/src/syft/store/linked_obj.py +++ b/packages/syft/src/syft/store/linked_obj.py @@ -48,7 +48,9 @@ def resolve(self) -> SyftObject: if api is None: raise ValueError(f"api is None. You must login to {self.node_uid}") - self._resolve_cache = api.services.notifications.resolve_object(self) + self._resolve_cache: SyftObject = api.services.notifications.resolve_object( + self + ) return self._resolve_cache def resolve_with_context(self, context: NodeServiceContext) -> Any: From ac9d3c101f14666b9e7c00d8806bf187ffe5f181 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 15:03:59 +0200 Subject: [PATCH 297/313] linting --- packages/syft/src/syft/store/linked_obj.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/store/linked_obj.py b/packages/syft/src/syft/store/linked_obj.py index 3dd1ec9295d..6e76a799930 100644 --- a/packages/syft/src/syft/store/linked_obj.py +++ b/packages/syft/src/syft/store/linked_obj.py @@ -48,10 +48,9 @@ def resolve(self) -> SyftObject: if api is None: raise ValueError(f"api is None. 
You must login to {self.node_uid}") - self._resolve_cache: SyftObject = api.services.notifications.resolve_object( - self - ) - return self._resolve_cache + resolve: SyftObject = api.services.notifications.resolve_object(self) + self._resolve_cache = resolve + return resolve def resolve_with_context(self, context: NodeServiceContext) -> Any: if context.node is None: From 03e52f07c96e09688cd6fb40341540119abfb7cb Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Tue, 25 Jun 2024 15:10:57 +0200 Subject: [PATCH 298/313] add build_state arg to resolve --- .../syft/src/syft/service/sync/diff_state.py | 10 +++--- .../src/syft/service/sync/resolve_widget.py | 35 +++++++++++++++---- 2 files changed, 34 insertions(+), 11 deletions(-) diff --git a/packages/syft/src/syft/service/sync/diff_state.py b/packages/syft/src/syft/service/sync/diff_state.py index 9778e98f200..d5f8eb60caf 100644 --- a/packages/syft/src/syft/service/sync/diff_state.py +++ b/packages/syft/src/syft/service/sync/diff_state.py @@ -566,11 +566,11 @@ class ObjectDiffBatch(SyftObject): root_diff: ObjectDiff sync_direction: SyncDirection | None - def resolve(self) -> "ResolveWidget": + def resolve(self, build_state: bool = True) -> "ResolveWidget": # relative from .resolve_widget import ResolveWidget - return ResolveWidget(self) + return ResolveWidget(self, build_state=build_state) def walk_graph( self, @@ -1142,14 +1142,16 @@ class NodeDiff(SyftObject): include_ignored: bool = False - def resolve(self) -> "PaginatedResolveWidget | SyftSuccess": + def resolve( + self, build_state: bool = True + ) -> "PaginatedResolveWidget | SyftSuccess": if len(self.batches) == 0: return SyftSuccess(message="No batches to resolve") # relative from .resolve_widget import PaginatedResolveWidget - return PaginatedResolveWidget(batches=self.batches) + return PaginatedResolveWidget(batches=self.batches, build_state=build_state) def __getitem__(self, idx: Any) -> ObjectDiffBatch: return self.batches[idx] diff --git a/packages/syft/src/syft/service/sync/resolve_widget.py b/packages/syft/src/syft/service/sync/resolve_widget.py index 496fb7a65eb..4a868634df3 100644 --- a/packages/syft/src/syft/service/sync/resolve_widget.py +++ b/packages/syft/src/syft/service/sync/resolve_widget.py @@ -105,10 +105,19 @@ def __init__( direction: SyncDirection, with_box: bool = True, show_share_warning: bool = False, + build_state: bool = True, ): - self.low_properties = diff.repr_attr_dict("low") - self.high_properties = diff.repr_attr_dict("high") - self.statuses = diff.repr_attr_diffstatus_dict() + build_state = build_state + + if build_state: + self.low_properties = diff.repr_attr_dict("low") + self.high_properties = diff.repr_attr_dict("high") + self.statuses = diff.repr_attr_diffstatus_dict() + else: + self.low_properties = {} + self.high_properties = {} + self.statuses = {} + self.direction = direction self.diff: ObjectDiff = diff self.with_box = with_box @@ -203,9 +212,10 @@ def __init__( self, diff: ObjectDiff, direction: SyncDirection, + build_state: bool = True, ): self.direction = direction - + self.build_state = build_state self.share_private_data = False self.diff: ObjectDiff = diff self.sync: bool = False @@ -275,6 +285,7 @@ def build(self) -> widgets.VBox: self.direction, with_box=False, show_share_warning=self.show_share_button, + build_state=self.build_state, ).widget accordion, share_private_checkbox, sync_checkbox = self.build_accordion( @@ -411,8 +422,12 @@ def _on_share_private_data_change(self, change: Any) -> None: class ResolveWidget: def __init__( - self, 
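The build_state flag that patch 298 threads through the widget constructors is a plain lazy-initialization switch: when a caller only needs widget shells, the expensive repr_attr_dict walks are skipped. A toy version of the same threading, with an assumed expensive_summary standing in for the real diff walk:

    class ChildWidget:
        def __init__(self, data: dict, build_state: bool = True) -> None:
            # Skip the costly pass when only the shell is needed.
            self.properties = expensive_summary(data) if build_state else {}

    class ParentWidget:
        def __init__(self, batches: list[dict], build_state: bool = True) -> None:
            # One top-level flag controls the build cost of every child.
            self.children = [
                ChildWidget(batch, build_state=build_state) for batch in batches
            ]

    def expensive_summary(data: dict) -> dict:
        return {key: repr(value) for key, value in data.items()}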
obj_diff_batch: ObjectDiffBatch, on_sync_callback: Callable | None = None + self, + obj_diff_batch: ObjectDiffBatch, + on_sync_callback: Callable | None = None, + build_state: bool = True, ): + self.build_state = build_state self.obj_diff_batch: ObjectDiffBatch = obj_diff_batch self.id2widget: dict[ UID, CollapsableObjectDiffWidget | MainObjectDiffWidget @@ -483,6 +498,7 @@ def batch_diff_widgets(self) -> list[CollapsableObjectDiffWidget]: CollapsableObjectDiffWidget( diff, direction=self.obj_diff_batch.sync_direction, + build_state=self.build_state, ) for diff in dependents ] @@ -498,7 +514,9 @@ def dependent_root_diff_widgets(self) -> list[CollapsableObjectDiffWidget]: ] widgets = [ CollapsableObjectDiffWidget( - diff, direction=self.obj_diff_batch.sync_direction + diff, + direction=self.obj_diff_batch.sync_direction, + build_state=self.build_state, ) for diff in other_roots ] @@ -509,6 +527,7 @@ def main_object_diff_widget(self) -> MainObjectDiffWidget: obj_diff_widget = MainObjectDiffWidget( self.obj_diff_batch.root_diff, direction=self.obj_diff_batch.sync_direction, + build_state=self.build_state, ) return obj_diff_widget @@ -712,12 +731,14 @@ class PaginatedResolveWidget: paginated by a PaginationControl widget. """ - def __init__(self, batches: list[ObjectDiffBatch]): + def __init__(self, batches: list[ObjectDiffBatch], build_state: bool = True): + self.build_state = build_state self.batches = batches self.resolve_widgets: list[ResolveWidget] = [ ResolveWidget( batch, on_sync_callback=partial(self.on_click_sync, i), + build_state=build_state, ) for i, batch in enumerate(self.batches) ] From 258afeb1ae13f8d1e0fc212038b5a7a380e142e9 Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 15:37:21 +0200 Subject: [PATCH 299/313] cache make_get --- packages/syft/src/syft/client/client.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index f5bde266ae9..eb5b1d1cc44 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -14,6 +14,8 @@ # third party from argon2 import PasswordHasher +from cachetools import TTLCache +from cachetools import cached from pydantic import field_validator import requests from requests import Response @@ -200,6 +202,8 @@ def session(self) -> Session: return self.session_cache def _make_get(self, path: str, params: dict | None = None) -> bytes: + if params is None: + return self._make_get_no_params(path) url = self.url.with_path(path) response = self.session.get( str(url), @@ -218,6 +222,26 @@ def _make_get(self, path: str, params: dict | None = None) -> bytes: return response.content + @cached(cache=TTLCache(maxsize=128, ttl=300)) + def _make_get_no_params(self, path: str) -> bytes: + print(path) + url = self.url.with_path(path) + response = self.session.get( + str(url), + headers=self.headers, + verify=verify_tls(), + proxies={}, + ) + if response.status_code != 200: + raise requests.ConnectionError( + f"Failed to fetch {url}. 
Response returned with code {response.status_code}" + ) + + # upgrade to tls if available + self.url = upgrade_tls(self.url, response) + + return response.content + def _make_post( self, path: str, From 3bfdb366a2ee8392746ca3dabe6ec6f488abff92 Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Tue, 25 Jun 2024 19:27:23 +0530 Subject: [PATCH 300/313] remove enclave code in request.py --- .../syft/src/syft/service/request/request.py | 24 ++----------------- 1 file changed, 2 insertions(+), 22 deletions(-) diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index 011e96c25d7..b2732c69f5e 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -670,7 +670,7 @@ def approve( metadata = api.connection.get_node_metadata(api.signing_key) else: metadata = None - message, is_enclave = None, False + message = None is_code_request = not isinstance(self.codes, SyftError) @@ -679,12 +679,7 @@ def approve( message="Multiple codes detected, please use approve_nested=True" ) - if self.code and not isinstance(self.code, SyftError): - is_enclave = getattr(self.code, "enclave_metadata", None) is not None - - if is_enclave: - message = "On approval, the result will be released to the enclave." - elif metadata and metadata.node_side_type == NodeSideType.HIGH_SIDE.value: + if metadata and metadata.node_side_type == NodeSideType.HIGH_SIDE.value: message = ( "You're approving a request on " f"{metadata.node_side_type} side {metadata.node_type} " @@ -1317,12 +1312,6 @@ def mutate( ) return res - def is_enclave_request(self, user_code: UserCode) -> bool: - return ( - user_code.is_enclave_code is not None - and self.value == UserCodeStatus.APPROVED - ) - def _run( self, context: ChangeContext, apply: bool ) -> Result[SyftSuccess, SyftError]: @@ -1346,16 +1335,7 @@ def _run( if isinstance(updated_status, SyftError): return Err(updated_status.message) - # relative - from ..enclave.enclave_service import propagate_inputs_to_enclave - self.linked_obj.update_with_context(context, updated_status) - if self.is_enclave_request(user_code): - enclave_res = propagate_inputs_to_enclave( - user_code=user_code, context=context - ) - if isinstance(enclave_res, SyftError): - return enclave_res else: updated_status = self.mutate(user_code_status, context, undo=True) if isinstance(updated_status, SyftError): From 3ac23c0da53c18c8b4fb7fd32955dc0b6b0d22ce Mon Sep 17 00:00:00 2001 From: Aziz Berkay Yesilyurt Date: Tue, 25 Jun 2024 16:00:40 +0200 Subject: [PATCH 301/313] cached type hints --- packages/syft/src/syft/types/syft_object.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 9df3f22300c..863b65581ab 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -7,6 +7,7 @@ from collections.abc import Mapping from collections.abc import Sequence from collections.abc import Set +from functools import cache from hashlib import sha256 import inspect from inspect import Signature @@ -229,6 +230,11 @@ def get_transform( ) +@cache +def cached_get_type_hints(cls: type) -> dict[str, Any]: + return typing.get_type_hints(cls) + + class SyftMigrationRegistry: __migration_version_registry__: dict[str, dict[int, str]] = {} __migration_transform_registry__: dict[str, dict[str, Callable]] = {} @@ -578,7 +584,7 
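The cachetools pattern in patch 299 deserves a note: @cached(cache=TTLCache(maxsize=128, ttl=300)) keys on the full argument tuple, self included, and every part of the key must be hashable, which is presumably why the cached path is split out as _make_get_no_params while requests carrying a params dict (unhashable) keep bypassing the cache. A sketch under those assumptions, with a placeholder _fetch in place of the real requests round trip:

    # third party
    from cachetools import TTLCache, cached

    class Connection:
        @cached(cache=TTLCache(maxsize=128, ttl=300))
        def _get_no_params(self, path: str) -> bytes:
            # Cached for five minutes; the key is (self, path), so distinct
            # Connection instances get distinct entries in one shared cache.
            return self._fetch(path)

        def get(self, path: str, params: dict | None = None) -> bytes:
            if params is None:
                return self._get_no_params(path)
            # A dict is unhashable, so parameterized requests skip the cache.
            return self._fetch(path, params)

        def _fetch(self, path: str, params: dict | None = None) -> bytes:
            return b"..."  # placeholder for the real HTTP round trip

The same idea, via functools.cache, backs the cached_get_type_hints wrapper in patch 301 above: typing.get_type_hints re-resolves forward references on every call, so memoizing per class is a cheap win.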
@@ def _syft_set_validate_private_attrs_(self, **kwargs: Any) -> None: return # Validate and set private attributes # https://github.com/pydantic/pydantic/issues/2105 - annotations = typing.get_type_hints(self.__class__) + annotations = cached_get_type_hints(self.__class__) for attr, decl in self.__private_attributes__.items(): value = kwargs.get(attr, decl.get_default()) var_annotation = annotations.get(attr) From 72b6a444c20ab3dcf99cd7c3b672c5d9aec5b61c Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Tue, 25 Jun 2024 19:37:56 +0530 Subject: [PATCH 302/313] remove enclave code in request and user code --- .../syft/src/syft/service/code/user_code.py | 53 +++++++++++------ .../syft/service/code/user_code_service.py | 59 ------------------- .../syft/service/code_history/code_history.py | 17 +++++- .../syft/src/syft/service/request/request.py | 4 -- 4 files changed, 52 insertions(+), 81 deletions(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index b71f5aa4cc6..a3330161a7c 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -54,6 +54,7 @@ from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SYFT_OBJECT_VERSION_4 from ...types.syft_object import SYFT_OBJECT_VERSION_5 +from ...types.syft_object import SYFT_OBJECT_VERSION_6 from ...types.syft_object import SyftObject from ...types.syncable_object import SyncableSyftObject from ...types.transforms import TransformContext @@ -299,7 +300,7 @@ class UserCodeV4(SyncableSyftObject): @serializable() -class UserCode(SyncableSyftObject): +class UserCodeV5(SyncableSyftObject): # version __canonical_name__ = "UserCode" __version__ = SYFT_OBJECT_VERSION_5 @@ -332,6 +333,40 @@ class UserCode(SyncableSyftObject): origin_node_side_type: NodeSideType l0_deny_reason: str | None = None + +@serializable() +class UserCode(SyncableSyftObject): + # version + __canonical_name__ = "UserCode" + __version__ = SYFT_OBJECT_VERSION_6 + + id: UID + node_uid: UID | None = None + user_verify_key: SyftVerifyKey + raw_code: str + input_policy_type: type[InputPolicy] | UserPolicy + input_policy_init_kwargs: dict[Any, Any] | None = None + input_policy_state: bytes = b"" + output_policy_type: type[OutputPolicy] | UserPolicy + output_policy_init_kwargs: dict[Any, Any] | None = None + output_policy_state: bytes = b"" + parsed_code: str + service_func_name: str + unique_func_name: str + user_unique_func_name: str + code_hash: str + signature: inspect.Signature + status_link: LinkedObject | None = None + input_kwargs: list[str] + submit_time: DateTime | None = None + # tracks if the code calls domain.something, variable is set during parsing + uses_domain: bool = False + + nested_codes: dict[str, tuple[LinkedObject, dict]] | None = {} + worker_pool_name: str | None = None + origin_node_side_type: NodeSideType + l0_deny_reason: str | None = None + __table_coll_widths__ = [ "min-content", "auto", @@ -510,10 +545,6 @@ def get_status( return SyftError(message=status.err()) return status.ok() - @property - def is_enclave_code(self) -> bool: - return self.enclave_metadata is not None - @property def input_owners(self) -> list[str] | None: if self.input_policy_init_kwargs is not None: @@ -738,17 +769,6 @@ def store_execution_output( def byte_code(self) -> PyCodeObject | None: return compile_byte_code(self.parsed_code) - def get_results(self) -> Any: - # relative - from 
...client.api import APIRegistry - - api = APIRegistry.api_for(self.node_uid, self.syft_client_verify_key) - if api is None: - return SyftError( - message=f"Can't access the api. You must login to {self.node_uid}" - ) - return api.services.code.get_results(self) - @property def assets(self) -> list[Asset]: # relative @@ -926,7 +946,6 @@ class SubmitUserCode(SyftObject): output_policy_init_kwargs: dict[Any, Any] | None = {} local_function: Callable | None = None input_kwargs: list[str] - enclave_metadata: EnclaveMetadata | None = None worker_pool_name: str | None = None __repr_attrs__ = ["func_name", "code"] diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 58ec982ac2c..374fcf22475 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -9,8 +9,6 @@ from result import Result # relative -from ...abstract_node import NodeType -from ...client.enclave_client import EnclaveClient from ...serde.serializable import serializable from ...store.document_store import DocumentStore from ...store.linked_obj import LinkedObject @@ -23,7 +21,6 @@ from ..action.action_permissions import ActionObjectPermission from ..action.action_permissions import ActionPermission from ..context import AuthedServiceContext -from ..network.routes import route_to_connection from ..output.output_service import ExecutionOutput from ..policy.policy import OutputPolicy from ..request.request import Request @@ -316,62 +313,6 @@ def load_user_code(self, context: AuthedServiceContext) -> None: user_code_items = result.ok() load_approved_policy_code(user_code_items=user_code_items, context=context) - @service_method(path="code.get_results", name="get_results", roles=GUEST_ROLE_LEVEL) - def get_results( - self, context: AuthedServiceContext, inp: UID | UserCode - ) -> list[UserCode] | SyftError: - uid = inp.id if isinstance(inp, UserCode) else inp - code_result = self.stash.get_by_uid(context.credentials, uid=uid) - - if code_result.is_err(): - return SyftError(message=code_result.err()) - code = code_result.ok() - - if code.is_enclave_code: - # if the current node is not the enclave - if not context.node.node_type == NodeType.ENCLAVE: - connection = route_to_connection(code.enclave_metadata.route) - enclave_client = EnclaveClient( - connection=connection, - credentials=context.node.signing_key, - ) - if enclave_client.code is None: - return SyftError( - message=f"{enclave_client} can't access the user code api" - ) - outputs = enclave_client.code.get_results(code.id) - if isinstance(outputs, list): - for output in outputs: - output.syft_action_data # noqa: B018 - else: - outputs.syft_action_data # noqa: B018 - return outputs - - # if the current node is the enclave - else: - if not code.get_status(context.as_root_context()).approved: - return code.status.get_status_message() - - output_history = code.get_output_history( - context=context.as_root_context() - ) - if isinstance(output_history, SyftError): - return output_history - - if len(output_history) > 0: - res = resolve_outputs( - context=context, - output_ids=output_history[-1].output_ids, - ) - if res.is_err(): - return res - res = delist_if_single(res.ok()) - return Ok(res) - else: - return SyftError(message="No results available") - else: - return SyftError(message="Endpoint only supported for enclave code") - def is_execution_allowed( self, code: UserCode, diff --git 
a/packages/syft/src/syft/service/code_history/code_history.py b/packages/syft/src/syft/service/code_history/code_history.py index c3b1151fe2e..488083cf0c6 100644 --- a/packages/syft/src/syft/service/code_history/code_history.py +++ b/packages/syft/src/syft/service/code_history/code_history.py @@ -8,6 +8,7 @@ from ...serde.serializable import serializable from ...service.user.user_roles import ServiceRole from ...types.syft_object import SYFT_OBJECT_VERSION_2 +from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...types.syft_object import SyftObject from ...types.syft_object import SyftVerifyKey from ...types.uid import UID @@ -20,7 +21,7 @@ @serializable() -class CodeHistory(SyftObject): +class CodeHistoryV2(SyftObject): # version __canonical_name__ = "CodeHistory" __version__ = SYFT_OBJECT_VERSION_2 @@ -33,6 +34,20 @@ class CodeHistory(SyftObject): service_func_name: str comment_history: list[str] = [] + +@serializable() +class CodeHistory(SyftObject): + # version + __canonical_name__ = "CodeHistory" + __version__ = SYFT_OBJECT_VERSION_3 + + id: UID + node_uid: UID + user_verify_key: SyftVerifyKey + user_code_history: list[UID] = [] + service_func_name: str + comment_history: list[str] = [] + __attr_searchable__ = ["user_verify_key", "service_func_name"] def add_code(self, code: UserCode, comment: str | None = None) -> None: diff --git a/packages/syft/src/syft/service/request/request.py b/packages/syft/src/syft/service/request/request.py index b2732c69f5e..2680bc31b14 100644 --- a/packages/syft/src/syft/service/request/request.py +++ b/packages/syft/src/syft/service/request/request.py @@ -610,9 +610,6 @@ def code(self) -> UserCode | SyftError: message="This type of request does not have code associated with it." ) - def get_results(self) -> Any: - return self.code.get_results() - @property def current_change_state(self) -> dict[UID, bool]: change_applied_map = {} @@ -1341,7 +1338,6 @@ def _run( if isinstance(updated_status, SyftError): return Err(updated_status.message) - # TODO: Handle Enclave approval. 
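The duplication in patches 302 and 304, where CodeHistoryV2 and SubmitUserCodeV4 keep the previous class body verbatim while the current name moves to a new __version__, is what lets payloads serialized under the old schema keep deserializing. A toy registry in that spirit, assuming a plain dict lookup rather than the real SyftObject migration machinery:

    REGISTRY: dict[tuple[str, int], type] = {}

    def register(cls: type) -> type:
        # Index every class by (canonical name, version).
        REGISTRY[(cls.canonical_name, cls.version)] = cls
        return cls

    @register
    class CodeHistoryV2:
        canonical_name = "CodeHistory"
        version = 2

    @register
    class CodeHistory:
        canonical_name = "CodeHistory"
        version = 3

    def class_for(canonical_name: str, version: int) -> type:
        # Old payloads resolve to the old class, then migrate forward.
        return REGISTRY[(canonical_name, version)]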
self.linked_obj.update_with_context(context, updated_status) return Ok(SyftSuccess(message=f"{type(self)} Success")) except Exception as e: From 46320181e9bef52ffe44c5ad349276d47179cd6d Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Tue, 25 Jun 2024 19:40:33 +0530 Subject: [PATCH 303/313] fix mypy --- packages/syft/src/syft/service/policy/policy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/policy/policy.py b/packages/syft/src/syft/service/policy/policy.py index fa97790ba39..e4800f04d6e 100644 --- a/packages/syft/src/syft/service/policy/policy.py +++ b/packages/syft/src/syft/service/policy/policy.py @@ -605,7 +605,7 @@ def allowed_ids_only( allowed_inputs: dict[NodeIdentity, Any], kwargs: dict[str, Any], context: AuthedServiceContext, -) -> dict[str, UID]: +) -> dict[NodeIdentity, UID]: if context.node.node_type == NodeType.DOMAIN: node_identity = NodeIdentity( node_name=context.node.name, From 2cea46823f1420472afa074ca2230fe25de633e8 Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Tue, 25 Jun 2024 19:43:04 +0530 Subject: [PATCH 304/313] fix submit user code --- .../syft/src/syft/service/code/user_code.py | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index a3330161a7c..cbd260a64b7 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -931,11 +931,31 @@ class UserCodeUpdate(PartialSyftObject): @serializable(without=["local_function"]) -class SubmitUserCode(SyftObject): +class SubmitUserCodeV4(SyftObject): # version __canonical_name__ = "SubmitUserCode" __version__ = SYFT_OBJECT_VERSION_4 + id: UID | None = None # type: ignore[assignment] + code: str + func_name: str + signature: inspect.Signature + input_policy_type: SubmitUserPolicy | UID | type[InputPolicy] + input_policy_init_kwargs: dict[Any, Any] | None = {} + output_policy_type: SubmitUserPolicy | UID | type[OutputPolicy] + output_policy_init_kwargs: dict[Any, Any] | None = {} + local_function: Callable | None = None + input_kwargs: list[str] + enclave_metadata: EnclaveMetadata | None = None + worker_pool_name: str | None = None + + +@serializable(without=["local_function"]) +class SubmitUserCode(SyftObject): + # version + __canonical_name__ = "SubmitUserCode" + __version__ = SYFT_OBJECT_VERSION_5 + id: UID | None = None # type: ignore[assignment] code: str func_name: str From eb1fc6be5a5c32a6e9596c4a29c5285e2f778727 Mon Sep 17 00:00:00 2001 From: rasswanth-s <43314053+rasswanth-s@users.noreply.github.com> Date: Tue, 25 Jun 2024 19:45:23 +0530 Subject: [PATCH 305/313] fix custom policy notebook --- notebooks/api/0.8/05-custom-policy.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebooks/api/0.8/05-custom-policy.ipynb b/notebooks/api/0.8/05-custom-policy.ipynb index 663468260b4..8c7b18c9328 100644 --- a/notebooks/api/0.8/05-custom-policy.ipynb +++ b/notebooks/api/0.8/05-custom-policy.ipynb @@ -286,7 +286,7 @@ " root_context = AuthedServiceContext(\n", " node=context.node, credentials=context.node.verify_key\n", " )\n", - " if context.node.node_type in NodeType.DOMAIN:\n", + " if context.node.node_type == NodeType.DOMAIN:\n", " for var_name, arg_id in allowed_inputs.items():\n", " kwarg_value = action_service._get(\n", " context=root_context,\n", From 
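Patch 305's one-character notebook fix (in becomes ==) is subtler than it looks. Assuming NodeType is a str-backed enum (an assumption, though it matches how the member is compared elsewhere in the series), the old expression only ran at all because membership against a single member degrades to a substring test on its value:

    from enum import Enum

    class NodeType(str, Enum):
        DOMAIN = "domain"
        ENCLAVE = "enclave"

    node_type = NodeType.DOMAIN

    # The buggy form: "in" against one member is a substring check on its
    # value, so unrelated strings can match too.
    assert node_type in NodeType.DOMAIN  # True, but only by accident
    assert "dom" in NodeType.DOMAIN      # also True, which is the trap

    # The fixed form compares the member itself.
    assert node_type == NodeType.DOMAIN
    assert NodeType.ENCLAVE != NodeType.DOMAIN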
c29f1527d3bd02440ef7b35ab663438c56cd602d Mon Sep 17 00:00:00 2001 From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:54:42 +0000 Subject: [PATCH 306/313] [syft]bump version --- .bumpversion.cfg | 2 +- VERSION | 2 +- packages/grid/VERSION | 2 +- .../backend/grid/images/worker_cpu.dockerfile | 2 +- packages/grid/devspace.yaml | 2 +- packages/grid/frontend/package.json | 2 +- packages/grid/helm/repo/index.yaml | 187 ++++++++++-------- .../grid/helm/repo/syft-0.8.7-beta.13.tgz | Bin 0 -> 12296 bytes packages/grid/helm/syft/Chart.yaml | 4 +- packages/grid/helm/syft/values.yaml | 2 +- packages/syft/setup.cfg | 2 +- packages/syft/src/syft/VERSION | 2 +- packages/syft/src/syft/__init__.py | 2 +- packages/syftcli/manifest.yml | 8 +- 14 files changed, 116 insertions(+), 103 deletions(-) create mode 100644 packages/grid/helm/repo/syft-0.8.7-beta.13.tgz diff --git a/.bumpversion.cfg b/.bumpversion.cfg index ef70d88f6ae..cd48a82318d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.7-beta.12 +current_version = 0.8.7-beta.13 tag = False tag_name = {new_version} commit = True diff --git a/VERSION b/VERSION index 99dffe7cb16..df1dec5602a 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.7-beta.12" +__version__ = "0.8.7-beta.13" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/VERSION b/packages/grid/VERSION index 99dffe7cb16..df1dec5602a 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.8.7-beta.12" +__version__ = "0.8.7-beta.13" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile index c566cb841b2..94297fee0a9 100644 --- a/packages/grid/backend/grid/images/worker_cpu.dockerfile +++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile @@ -5,7 +5,7 @@ # NOTE: This dockerfile will be built inside a grid-backend container in PROD # Hence COPY will not work the same way in DEV vs. 
PROD -ARG SYFT_VERSION_TAG="0.8.7-beta.12" +ARG SYFT_VERSION_TAG="0.8.7-beta.13" FROM openmined/grid-backend:${SYFT_VERSION_TAG} # should match base image python version diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index 650e3120ed9..6e12cf245cc 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -27,7 +27,7 @@ vars: DOCKER_IMAGE_SEAWEEDFS: openmined/grid-seaweedfs DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/grid-enclave-attestation CONTAINER_REGISTRY: "docker.io" - VERSION: "0.8.7-beta.12" + VERSION: "0.8.7-beta.13" PLATFORM: $(uname -m | grep -q 'arm64' && echo "arm64" || echo "amd64") # This is a list of `images` that DevSpace can build for this project diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 586d677afb3..15fddc2a6d2 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "pygrid-ui", - "version": "0.8.7-beta.12", + "version": "0.8.7-beta.13", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 5501c3be72c..56fdc4275cb 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,22 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.8.7-beta.13 + created: "2024-06-25T14:52:54.915182775Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 1dbe3ecdfec57bf25020cbcff783fab908f0eb0640ad684470b2fd1da1928005 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.8.7-beta.13.tgz + version: 0.8.7-beta.13 - apiVersion: v2 appVersion: 0.8.7-beta.12 - created: "2024-06-25T06:53:34.402656615Z" + created: "2024-06-25T14:52:54.914499475Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e92b2f3a522dabb3a79ff762a7042ae16d2bf3a53eebbb2885a69b9f834d109c @@ -16,7 +29,7 @@ entries: version: 0.8.7-beta.12 - apiVersion: v2 appVersion: 0.8.7-beta.11 - created: "2024-06-25T06:53:34.401975147Z" + created: "2024-06-25T14:52:54.913815474Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 099f6cbd44b699ee2410a4be012ed1a8a65bcacb06a43057b2779d7fe34fc0ad @@ -29,7 +42,7 @@ entries: version: 0.8.7-beta.11 - apiVersion: v2 appVersion: 0.8.7-beta.10 - created: "2024-06-25T06:53:34.401295772Z" + created: "2024-06-25T14:52:54.913093962Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 00773cb241522e281c1915339fc362e047650e08958a736e93d6539f44cb5e25 @@ -42,7 +55,7 @@ entries: version: 0.8.7-beta.10 - apiVersion: v2 appVersion: 0.8.7-beta.9 - created: "2024-06-25T06:53:34.408352854Z" + created: "2024-06-25T14:52:54.921044649Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a3f8e85d9ddef7a644b959fcc2fcb0fc08f7b6abae1045e893d0d62fa4ae132e @@ -55,7 +68,7 @@ entries: version: 0.8.7-beta.9 - apiVersion: v2 appVersion: 0.8.7-beta.8 - created: "2024-06-25T06:53:34.407714717Z" + created: "2024-06-25T14:52:54.920407065Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a422ac88d8fd1fb80d5004d5eb6e95fa9efc7f6a87da12e5ac04829da7f04c4d @@ -68,7 +81,7 @@ entries: 
version: 0.8.7-beta.8 - apiVersion: v2 appVersion: 0.8.7-beta.7 - created: "2024-06-25T06:53:34.40700263Z" + created: "2024-06-25T14:52:54.919767347Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0dc313a1092e6256a7c8aad002c8ec380b3add2c289d680db1e238a336399b7a @@ -81,7 +94,7 @@ entries: version: 0.8.7-beta.7 - apiVersion: v2 appVersion: 0.8.7-beta.6 - created: "2024-06-25T06:53:34.406389289Z" + created: "2024-06-25T14:52:54.91914935Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 052a2ec1102d2a4c9915f95647abd4a6012f56fa05a106f4952ee9b55bf7bae8 @@ -94,7 +107,7 @@ entries: version: 0.8.7-beta.6 - apiVersion: v2 appVersion: 0.8.7-beta.5 - created: "2024-06-25T06:53:34.405770749Z" + created: "2024-06-25T14:52:54.918496807Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1728af756907c3fcbe87c2fd2de014a2d963c22a4c2eb6af6596b525a9b9a18a @@ -107,7 +120,7 @@ entries: version: 0.8.7-beta.5 - apiVersion: v2 appVersion: 0.8.7-beta.4 - created: "2024-06-25T06:53:34.405139654Z" + created: "2024-06-25T14:52:54.917833966Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 387a57a3904a05ed61e92ee48605ef6fd5044ff7e822e0924e0d4c485e2c88d2 @@ -120,7 +133,7 @@ entries: version: 0.8.7-beta.4 - apiVersion: v2 appVersion: 0.8.7-beta.3 - created: "2024-06-25T06:53:34.403876083Z" + created: "2024-06-25T14:52:54.916857511Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 3668002b7a4118516b2ecd61d6275f60d83fc12841587ab8f62e1c1200731c67 @@ -133,7 +146,7 @@ entries: version: 0.8.7-beta.3 - apiVersion: v2 appVersion: 0.8.7-beta.2 - created: "2024-06-25T06:53:34.403232787Z" + created: "2024-06-25T14:52:54.915789101Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e62217ffcadee2b8896ab0543f9ccc42f2df898fd979438ac9376d780b802af7 @@ -146,7 +159,7 @@ entries: version: 0.8.7-beta.2 - apiVersion: v2 appVersion: 0.8.7-beta.1 - created: "2024-06-25T06:53:34.40063933Z" + created: "2024-06-25T14:52:54.912442092Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 553981fe1d5c980e6903b3ff2f1b9b97431f6dd8aee91e3976bcc5594285235e @@ -159,7 +172,7 @@ entries: version: 0.8.7-beta.1 - apiVersion: v2 appVersion: 0.8.6 - created: "2024-06-25T06:53:34.400141236Z" + created: "2024-06-25T14:52:54.911941434Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ddbbe6fea1702e57404875eb3019a3b1a341017bdbb5fbc6ce418507e5c15756 @@ -172,7 +185,7 @@ entries: version: 0.8.6 - apiVersion: v2 appVersion: 0.8.6-beta.1 - created: "2024-06-25T06:53:34.399615239Z" + created: "2024-06-25T14:52:54.911419657Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc2c81ef6796ac853dce256e6bf8a6af966c21803e6534ea21920af681c62e61 @@ -185,7 +198,7 @@ entries: version: 0.8.6-beta.1 - apiVersion: v2 appVersion: 0.8.5 - created: "2024-06-25T06:53:34.399046622Z" + created: "2024-06-25T14:52:54.910887971Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: db5d90d44006209fd5ecdebd88f5fd56c70f7c76898343719a0ff8da46da948a @@ -198,7 +211,7 @@ entries: version: 0.8.5 - apiVersion: v2 appVersion: 0.8.5-post.2 - created: "2024-06-25T06:53:34.398297737Z" + created: "2024-06-25T14:52:54.910132145Z" description: Perform numpy-like 
analysis on data that remains in someone elses server digest: ea3f7269b55f773fa165d7008c054b7cf3ec4c62eb40a96f08cd3a9b77fd2165 @@ -211,7 +224,7 @@ entries: version: 0.8.5-post.2 - apiVersion: v2 appVersion: 0.8.5-post.1 - created: "2024-06-25T06:53:34.39775036Z" + created: "2024-06-25T14:52:54.909328308Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9deb844d3dc2d8480c60f8c631dcc7794adfb39cec3aa3b1ce22ea26fdf87d02 @@ -224,7 +237,7 @@ entries: version: 0.8.5-post.1 - apiVersion: v2 appVersion: 0.8.5-beta.10 - created: "2024-06-25T06:53:34.390273423Z" + created: "2024-06-25T14:52:54.901637641Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49 @@ -237,7 +250,7 @@ entries: version: 0.8.5-beta.10 - apiVersion: v2 appVersion: 0.8.5-beta.9 - created: "2024-06-25T06:53:34.396319949Z" + created: "2024-06-25T14:52:54.908047519Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b @@ -250,7 +263,7 @@ entries: version: 0.8.5-beta.9 - apiVersion: v2 appVersion: 0.8.5-beta.8 - created: "2024-06-25T06:53:34.395578407Z" + created: "2024-06-25T14:52:54.907301222Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 @@ -263,7 +276,7 @@ entries: version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-06-25T06:53:34.394805848Z" + created: "2024-06-25T14:52:54.906546207Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -276,7 +289,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-06-25T06:53:34.394062353Z" + created: "2024-06-25T14:52:54.905804338Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -289,7 +302,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-06-25T06:53:34.393327654Z" + created: "2024-06-25T14:52:54.905005392Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -302,7 +315,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-06-25T06:53:34.392595029Z" + created: "2024-06-25T14:52:54.904263382Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -315,7 +328,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-06-25T06:53:34.391850402Z" + created: "2024-06-25T14:52:54.90351485Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -328,7 +341,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-06-25T06:53:34.391057434Z" + created: "2024-06-25T14:52:54.902672242Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -341,7 +354,7 @@ 
entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-06-25T06:53:34.3887868Z" + created: "2024-06-25T14:52:54.900351402Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -353,7 +366,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-06-25T06:53:34.388419701Z" + created: "2024-06-25T14:52:54.899982841Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -365,7 +378,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-06-25T06:53:34.386042961Z" + created: "2024-06-25T14:52:54.897562879Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -377,7 +390,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-06-25T06:53:34.385651898Z" + created: "2024-06-25T14:52:54.897124859Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -389,7 +402,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-06-25T06:53:34.384915345Z" + created: "2024-06-25T14:52:54.896384402Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -401,7 +414,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-06-25T06:53:34.38451279Z" + created: "2024-06-25T14:52:54.895981798Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -413,7 +426,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-06-25T06:53:34.384105926Z" + created: "2024-06-25T14:52:54.895572461Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -425,7 +438,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-06-25T06:53:34.383688974Z" + created: "2024-06-25T14:52:54.895153567Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -437,7 +450,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-06-25T06:53:34.382888916Z" + created: "2024-06-25T14:52:54.894459409Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -449,7 +462,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-06-25T06:53:34.382124342Z" + created: "2024-06-25T14:52:54.893613495Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -461,7 +474,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-06-25T06:53:34.381731875Z" + created: 
"2024-06-25T14:52:54.893200211Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -473,7 +486,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-06-25T06:53:34.381342294Z" + created: "2024-06-25T14:52:54.892776578Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -485,7 +498,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-06-25T06:53:34.380945099Z" + created: "2024-06-25T14:52:54.892368944Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -497,7 +510,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-06-25T06:53:34.380541652Z" + created: "2024-06-25T14:52:54.891969626Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -509,7 +522,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-06-25T06:53:34.379580107Z" + created: "2024-06-25T14:52:54.891018374Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -521,7 +534,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-06-25T06:53:34.379153818Z" + created: "2024-06-25T14:52:54.890618786Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -533,7 +546,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-06-25T06:53:34.378762103Z" + created: "2024-06-25T14:52:54.890227603Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -545,7 +558,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-06-25T06:53:34.378366711Z" + created: "2024-06-25T14:52:54.889830279Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -557,7 +570,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-06-25T06:53:34.377970237Z" + created: "2024-06-25T14:52:54.88942517Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -569,7 +582,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-06-25T06:53:34.377619408Z" + created: "2024-06-25T14:52:54.889036452Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -581,7 +594,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-06-25T06:53:34.377235889Z" + created: "2024-06-25T14:52:54.888678191Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 
af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -593,7 +606,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-06-25T06:53:34.376259338Z" + created: "2024-06-25T14:52:54.888112171Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -605,7 +618,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-06-25T06:53:34.375661466Z" + created: "2024-06-25T14:52:54.88720381Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -617,7 +630,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-06-25T06:53:34.388018558Z" + created: "2024-06-25T14:52:54.899577432Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -629,7 +642,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-06-25T06:53:34.38769439Z" + created: "2024-06-25T14:52:54.89925044Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -641,7 +654,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-06-25T06:53:34.38736388Z" + created: "2024-06-25T14:52:54.898923167Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -653,7 +666,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-06-25T06:53:34.387012901Z" + created: "2024-06-25T14:52:54.898597537Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -665,7 +678,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-06-25T06:53:34.386694855Z" + created: "2024-06-25T14:52:54.898264563Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -677,7 +690,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-06-25T06:53:34.386371027Z" + created: "2024-06-25T14:52:54.897896133Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -689,7 +702,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-06-25T06:53:34.385254662Z" + created: "2024-06-25T14:52:54.896727544Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -701,7 +714,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-06-25T06:53:34.380137053Z" + created: "2024-06-25T14:52:54.891563686Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -717,7 +730,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-06-25T06:53:34.375192687Z" + created: 
"2024-06-25T14:52:54.886840739Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -733,7 +746,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-06-25T06:53:34.374621725Z" + created: "2024-06-25T14:52:54.886301389Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -749,7 +762,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-06-25T06:53:34.373981855Z" + created: "2024-06-25T14:52:54.88564517Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -765,7 +778,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-06-25T06:53:34.373383522Z" + created: "2024-06-25T14:52:54.885028145Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -781,7 +794,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-06-25T06:53:34.372818762Z" + created: "2024-06-25T14:52:54.884468206Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -797,7 +810,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-06-25T06:53:34.372173722Z" + created: "2024-06-25T14:52:54.883815253Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -813,7 +826,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-06-25T06:53:34.371631885Z" + created: "2024-06-25T14:52:54.883268449Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -829,7 +842,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-06-25T06:53:34.371036718Z" + created: "2024-06-25T14:52:54.882705244Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -845,7 +858,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-06-25T06:53:34.370119312Z" + created: "2024-06-25T14:52:54.881823299Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -861,7 +874,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-06-25T06:53:34.36878117Z" + created: "2024-06-25T14:52:54.880463332Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -877,7 +890,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-06-25T06:53:34.368166107Z" + created: "2024-06-25T14:52:54.879820297Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -893,7 +906,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-06-25T06:53:34.367545843Z" + created: "2024-06-25T14:52:54.87918108Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -909,7 +922,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-06-25T06:53:34.366874793Z" + created: "2024-06-25T14:52:54.878467744Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -925,7 +938,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-06-25T06:53:34.366251454Z" + created: "2024-06-25T14:52:54.877795425Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -941,7 +954,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: 
"2024-06-25T06:53:34.365625409Z" + created: "2024-06-25T14:52:54.877128356Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -957,7 +970,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-06-25T06:53:34.364985248Z" + created: "2024-06-25T14:52:54.876499117Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -973,7 +986,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-06-25T06:53:34.364262682Z" + created: "2024-06-25T14:52:54.87585986Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -989,7 +1002,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-06-25T06:53:34.363203034Z" + created: "2024-06-25T14:52:54.874640847Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1005,7 +1018,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-06-25T06:53:34.362267008Z" + created: "2024-06-25T14:52:54.873983756Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1021,7 +1034,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-06-25T06:53:34.361714361Z" + created: "2024-06-25T14:52:54.873429929Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1037,7 +1050,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-06-25T06:53:34.361168797Z" + created: "2024-06-25T14:52:54.87285366Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1053,7 +1066,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-06-25T06:53:34.36062142Z" + created: "2024-06-25T14:52:54.872314971Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1069,7 +1082,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-06-25T06:53:34.360065867Z" + created: "2024-06-25T14:52:54.871773206Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1085,7 +1098,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-06-25T06:53:34.359420647Z" + created: "2024-06-25T14:52:54.871123439Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1101,7 +1114,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-06-25T06:53:34.358722316Z" + created: "2024-06-25T14:52:54.870471407Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1117,7 +1130,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-06-25T06:53:34.358149962Z" + created: "2024-06-25T14:52:54.869913914Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1133,7 +1146,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-06-25T06:53:34.357590262Z" + created: "2024-06-25T14:52:54.869352352Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1149,7 +1162,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-06-25T06:53:34.356996818Z" + created: "2024-06-25T14:52:54.868708877Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ 
-1165,7 +1178,7 @@ entries:
     version: 0.8.2-beta.37
   - apiVersion: v2
     appVersion: 0.8.1
-    created: "2024-06-25T06:53:34.355655254Z"
+    created: "2024-06-25T14:52:54.86743294Z"
     dependencies:
     - name: component-chart
       repository: https://charts.devspace.sh
@@ -1179,4 +1192,4 @@ entries:
     urls:
    - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz
    version: 0.8.1
-generated: "2024-06-25T06:53:34.354929102Z"
+generated: "2024-06-25T14:52:54.866566006Z"
diff --git a/packages/grid/helm/repo/syft-0.8.7-beta.13.tgz b/packages/grid/helm/repo/syft-0.8.7-beta.13.tgz
new file mode 100644
index 0000000000000000000000000000000000000000..212dd783e39342da9d167fbd5b602e4d18565d35
GIT binary patch
literal 12296
[~12 KiB of base85-encoded binary archive data omitted]

Date: Wed, 26 Jun 2024 09:26:16 +0200
Subject: [PATCH 307/313] fix type

---
 .../syft/service/code_history/code_history_service.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/syft/src/syft/service/code_history/code_history_service.py b/packages/syft/src/syft/service/code_history/code_history_service.py
index 2c01b26fd1a..41839045747 100644
--- a/packages/syft/src/syft/service/code_history/code_history_service.py
+++ b/packages/syft/src/syft/service/code_history/code_history_service.py
@@ -126,7 +126,7 @@ def fetch_histories_for_user(
         credentials=context.credentials, user_verify_key=user_verify_key
     )
 
-    user_code_service: UserCodeService = context.node.get_service("usercodeservice")
+    user_code_service: UserCodeService = context.node.get_service("usercodeservice")  # type: ignore
 
     def get_code(uid: UID) -> UserCode | SyftError:
         return user_code_service.stash.get_by_uid(

From 0db00a2e073bd16d58bdb6fb13147b0f626d98f9 Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Wed, 26 Jun 2024 10:44:01 +0200
Subject: [PATCH 308/313] fix mypy

---
 packages/syft/src/syft/service/api/api_service.py        | 2 +-
 packages/syft/src/syft/service/code/user_code.py         | 2 ++
 packages/syft/src/syft/service/code/user_code_service.py | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/packages/syft/src/syft/service/api/api_service.py b/packages/syft/src/syft/service/api/api_service.py
index 0f8405db166..a302a7805d2 100644
--- a/packages/syft/src/syft/service/api/api_service.py
+++ b/packages/syft/src/syft/service/api/api_service.py
@@ -65,7 +65,7 @@ def set(
             new_endpoint = endpoint
 
             if new_endpoint is None:
-                return SyftError(message="Invalid endpoint type.")
+                return SyftError(message="Invalid endpoint type.")  # type: ignore
         except ValueError as e:
             return SyftError(message=str(e))
 
diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py
index 35691e8e5e5..a3179893316 100644
--- a/packages/syft/src/syft/service/code/user_code.py
+++ b/packages/syft/src/syft/service/code/user_code.py
@@ -1405,6 +1405,8 @@ def compile_code(context: TransformContext) -> TransformContext:
 def hash_code(context: TransformContext) -> TransformContext:
     if context.output is None:
         return context
+    if not isinstance(context.obj, SubmitUserCode):
+        return context
 
     code = context.output["code"]
     context.output["raw_code"] = code
diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index 511d26ad86c..9ae54dc20db 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -265,7 +265,7 @@ def _get_or_submit_user_code(
             return Err("UserCode not found on this node.")
         return Ok(user_code)
 
-    elif isinstance(code, SubmitUserCode):
+    elif isinstance(code, SubmitUserCode):  # type: ignore[unreachable]
         # Submit new UserCode
         # NOTE if a code with the same hash exists, it will be returned instead
         user_code_result = self._submit(context, code, exists_ok=True)

From db6c76d65183cfaf9dc4de086da522cfcbd1ab38 Mon Sep 17 00:00:00 2001
From: alfred-openmined-bot <145415986+alfred-openmined-bot@users.noreply.github.com>
Date: Wed, 26 Jun 2024 08:52:53 +0000
Subject: [PATCH 309/313] bump protocol and remove notebooks

---
 .../src/syft/protocol/protocol_version.json | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json
index 9ff9826a945..5054b847ee4 100644
--- a/packages/syft/src/syft/protocol/protocol_version.json
+++ b/packages/syft/src/syft/protocol/protocol_version.json
@@ -290,6 +290,11 @@
         "version": 5,
         "hash": "128705a5fdf308055ef857b25c80966c928938a05ec03459dae9b36bd6122aa2",
         "action": "add"
+      },
+      "6": {
+        "version": 6,
+        "hash": "c48ec3160bb34adf937e6306523c7ebc52861ff84a576a30a28cd45c224ded0f",
+        "action": "add"
       }
     },
     "SyncedUserCodeStatusChange": {
@@ -354,6 +359,20 @@
         "hash": "ba9ebb04cc3e8b3ae3302fd42a67e47261a0a330bae5f189d8f4819cf2804711",
         "action": "add"
       }
+    },
+    "SubmitUserCode": {
+      "5": {
+        "version": 5,
+        "hash": "3135727b8f0ca7689d47c04e45a2bd6a7693f17c043f76fd2243135196c27232",
+        "action": "add"
+      }
+    },
+    "CodeHistory": {
+      "3": {
+        "version": 3,
+        "hash": "1b9bd1d3d096abab5617c2ff597b4c80751f686d16482a2cff4efd8741b84d53",
+        "action": "add"
+      }
     }
   }
 }
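A protocol bump like the one above is easier to audit mechanically than by scanning the JSON hunks. A hedged sketch that lists every object version flagged "action": "add" follows; it is stdlib-only, the object_versions layout is inferred from the hunks above, and the file path comes from the diff header, so adjust both for your checkout.

    import json

    PATH = "packages/syft/src/syft/protocol/protocol_version.json"

    with open(PATH) as f:
        protocol = json.load(f)

    # Walk protocol entry -> class -> version and report everything newly added.
    for proto, entry in protocol.items():
        if not isinstance(entry, dict):
            continue  # tolerate entries that are plain strings (e.g. release names)
        for klass, versions in entry.get("object_versions", {}).items():
            for version, meta in versions.items():
                if meta.get("action") == "add":
                    print(f"protocol {proto}: {klass} v{version} hash={meta['hash'][:12]}")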
From cc9ba30fe949c359f94b03b91b73085475d49424 Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Wed, 26 Jun 2024 11:17:03 +0200
Subject: [PATCH 310/313] add tests, cleanup

---
 .../syft/service/code/user_code_service.py    | 22 +++++--
 .../syft/tests/syft/users/user_code_test.py   | 64 +++++++++++++++++++
 2 files changed, 82 insertions(+), 4 deletions(-)

diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index 9ae54dc20db..6264746e549 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -76,6 +76,20 @@ def _submit(
         submit_code: SubmitUserCode,
         exists_ok: bool = False,
     ) -> Result[UserCode, str]:
+        """
+        Submit a UserCode.
+
+        If exists_ok is True, the function will return the existing code if it exists.
+
+        Args:
+            context (AuthedServiceContext): the authenticated service context
+            submit_code (SubmitUserCode): UserCode to submit
+            exists_ok (bool, optional): If True, return the existing code if it exists.
+                If False, submitting existing code returns an Err. Defaults to False.
+
+        Returns:
+            Result[UserCode, str]: New UserCode or error
+        """
         existing_code_or_err = self.stash.get_by_code_hash(
             context.credentials,
             code_hash=submit_code.get_code_hash(),
@@ -289,10 +303,10 @@ def request_code_execution(
     ) -> Request | SyftError:
         """Request Code execution on user code"""
 
-        user_code_result = self._get_or_submit_user_code(context, code)
-        if user_code_result.is_err():
-            return SyftError(message=user_code_result.err())
-        user_code = user_code_result.ok()
+        user_code_or_err = self._get_or_submit_user_code(context, code)
+        if user_code_or_err.is_err():
+            return SyftError(message=user_code_or_err.err())
+        user_code = user_code_or_err.ok()
 
         result = self._request_code_execution(
             context,
diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py
index a1182b1630a..3e9cb975580 100644
--- a/packages/syft/tests/syft/users/user_code_test.py
+++ b/packages/syft/tests/syft/users/user_code_test.py
@@ -393,3 +393,67 @@ def valid_name_2():
         valid_name_2.func_name = "get_all"
         with pytest.raises(ValidationError):
             client.code.submit(valid_name_2)
+
+
+def test_request_existing_usercodesubmit(worker) -> None:
+    root_domain_client = worker.root_client
+
+    root_domain_client.register(
+        name="data-scientist",
+        email="test_user@openmined.org",
+        password="0000",
+        password_verify="0000",
+    )
+    ds_client = root_domain_client.login(
+        email="test_user@openmined.org",
+        password="0000",
+    )
+
+    @sy.syft_function_single_use()
+    def my_func():
+        return 42
+
+    res_submit = ds_client.api.services.code.submit(my_func)
+    assert isinstance(res_submit, SyftSuccess)
+    res_request = ds_client.api.services.code.request_code_execution(my_func)
+    assert isinstance(res_request, Request)
+
+    # Second request fails, cannot have multiple requests for the same code
+    res_request = ds_client.api.services.code.request_code_execution(my_func)
+    assert isinstance(res_request, SyftError)
+
+    assert len(ds_client.code.get_all()) == 1
+    assert len(ds_client.requests.get_all()) == 1
+
+
+def test_request_existing_usercode(worker) -> None:
+    root_domain_client = worker.root_client
+
+    root_domain_client.register(
+        name="data-scientist",
+        email="test_user@openmined.org",
+        password="0000",
+        password_verify="0000",
+    )
+    ds_client = root_domain_client.login(
+        email="test_user@openmined.org",
+        password="0000",
+    )
+
+    @sy.syft_function_single_use()
+    def my_func():
+        return 42
+
+    res_submit = ds_client.api.services.code.submit(my_func)
+    assert isinstance(res_submit, SyftSuccess)
+
+    code = ds_client.code.get_all()[0]
+    res_request = ds_client.api.services.code.request_code_execution(my_func)
+    assert isinstance(res_request, Request)
+
+    # Second request fails, cannot have multiple requests for the same code
+    res_request = ds_client.api.services.code.request_code_execution(code)
+    assert isinstance(res_request, SyftError)
+
+    assert len(ds_client.code.get_all()) == 1
+    assert len(ds_client.requests.get_all()) == 1
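The exists_ok flag added in PATCH 310 turns _submit into a get-or-create keyed on the code hash, which is what lets request_code_execution accept an already-submitted function without erroring. A minimal sketch of that control flow, with a dict-backed stash and plain exceptions standing in for Syft's stash and Result types (none of these names are the Syft API):

    class CodeStash:
        def __init__(self) -> None:
            self._by_hash: dict[str, str] = {}

        def submit(self, code: str, exists_ok: bool = False) -> str:
            code_hash = str(hash(code))
            existing = self._by_hash.get(code_hash)
            if existing is not None:
                if not exists_ok:
                    # Plain submit: resubmitting identical code is an error.
                    raise ValueError("The code to be submitted already exists")
                # exists_ok=True: the existing entry is silently reused.
                return existing
            self._by_hash[code_hash] = code
            return code

    stash = CodeStash()
    stash.submit("def my_func(): return 42")
    # The second submission succeeds only because exists_ok is set.
    assert stash.submit("def my_func(): return 42", exists_ok=True)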
From 51e101348bd1e1630e80ff174f7a7ee525a51360 Mon Sep 17 00:00:00 2001
From: eelcovdw
Date: Wed, 26 Jun 2024 11:36:47 +0200
Subject: [PATCH 311/313] typo

---
 .../syft/service/code/user_code_service.py    | 23 +++++--------------
 1 file changed, 8 insertions(+), 15 deletions(-)

diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py
index 6264746e549..dd45514dd6f 100644
--- a/packages/syft/src/syft/service/code/user_code_service.py
+++ b/packages/syft/src/syft/service/code/user_code_service.py
@@ -271,24 +271,17 @@ def _get_or_submit_user_code(
         """
         if isinstance(code, UserCode):
             # Get existing UserCode
-            user_code_result = self.stash.get_by_uid(context.credentials, code.id)
-            if user_code_result.is_err():
-                return Err(user_code_result.err())
-            user_code = user_code_result.ok()
+            user_code_or_err = self.stash.get_by_uid(context.credentials, code.id)
+            if user_code_or_err.is_err():
+                return user_code_or_err
+            user_code = user_code_or_err.ok()
             if user_code is None:
                 return Err("UserCode not found on this node.")
             return Ok(user_code)
-
-        elif isinstance(code, SubmitUserCode):  # type: ignore[unreachable]
-            # Submit new UserCode
-            # NOTE if a code with the same hash exists, it will be returned instead
-            user_code_result = self._submit(context, code, exists_ok=True)
-            return user_code_result
-
-        else:
-            return Err(  # type: ignore[unreachable]
-                f"request_code_execution expects a UserCode or SubmitUserCode object, got a {type(code).__name__}"
-            )
+        else:  # code: SubmitUserCode
+            # Submit new UserCode, or get existing UserCode with the same code hash
+            user_code_or_err = self._submit(context, code, exists_ok=True)
+            return user_code_or_err
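Besides the rename, PATCH 311 stops unwrapping and rewrapping errors: an Err from the stash is now returned as-is instead of being rebuilt with Err(user_code_or_err.err()). The pattern in isolation, assuming the result package that provides the Ok/Err/Result types the service code uses (the functions here are illustrative only):

    from result import Err, Ok, Result

    def fetch(uid: str, store: dict[str, str]) -> Result[str, str]:
        if uid not in store:
            return Err(f"{uid} not found")
        return Ok(store[uid])

    def fetch_upper(uid: str, store: dict[str, str]) -> Result[str, str]:
        res = fetch(uid, store)
        if res.is_err():
            return res  # propagate the Err unchanged; no need to rewrap it
        return Ok(res.ok().upper())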
+from .user_code import get_code_hash from .user_code import load_approved_policy_code from .user_code_stash import UserCodeStash @@ -89,7 +90,7 @@ def _submit( """ existing_code_or_err = self.stash.get_by_code_hash( context.credentials, - code_hash=submit_code.get_code_hash(), + code_hash=get_code_hash(submit_code.code, context.credentials), ) if existing_code_or_err.is_err(): diff --git a/packages/syft/tests/syft/users/user_code_test.py b/packages/syft/tests/syft/users/user_code_test.py index 3e9cb975580..69f69ab76d4 100644 --- a/packages/syft/tests/syft/users/user_code_test.py +++ b/packages/syft/tests/syft/users/user_code_test.py @@ -457,3 +457,48 @@ def my_func(): assert len(ds_client.code.get_all()) == 1 assert len(ds_client.requests.get_all()) == 1 + + +def test_submit_existing_code_different_user(worker): + root_domain_client = worker.root_client + + root_domain_client.register( + name="data-scientist", + email="test_user@openmined.org", + password="0000", + password_verify="0000", + ) + ds_client_1 = root_domain_client.login( + email="test_user@openmined.org", + password="0000", + ) + + root_domain_client.register( + name="data-scientist-2", + email="test_user_2@openmined.org", + password="0000", + password_verify="0000", + ) + ds_client_2 = root_domain_client.login( + email="test_user_2@openmined.org", + password="0000", + ) + + @sy.syft_function_single_use() + def my_func(): + return 42 + + res_submit = ds_client_1.api.services.code.submit(my_func) + assert isinstance(res_submit, SyftSuccess) + res_resubmit = ds_client_1.api.services.code.submit(my_func) + assert isinstance(res_resubmit, SyftError) + + # Resubmit with different user + res_submit = ds_client_2.api.services.code.submit(my_func) + assert isinstance(res_submit, SyftSuccess) + res_resubmit = ds_client_2.api.services.code.submit(my_func) + assert isinstance(res_resubmit, SyftError) + + assert len(ds_client_1.code.get_all()) == 1 + assert len(ds_client_2.code.get_all()) == 1 + assert len(root_domain_client.code.get_all()) == 2 From 4e276d359ddcaf8f3761a276814dab4f78c711c0 Mon Sep 17 00:00:00 2001 From: eelcovdw Date: Wed, 26 Jun 2024 11:56:31 +0200 Subject: [PATCH 313/313] fix mypyp --- packages/syft/src/syft/service/code/user_code_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/service/code/user_code_service.py b/packages/syft/src/syft/service/code/user_code_service.py index 27110fd96bb..cd947cf325a 100644 --- a/packages/syft/src/syft/service/code/user_code_service.py +++ b/packages/syft/src/syft/service/code/user_code_service.py @@ -277,7 +277,7 @@ def _get_or_submit_user_code( return Ok(user_code) else: # code: SubmitUserCode # Submit new UserCode, or get existing UserCode with the same code hash - user_code_or_err = self._submit(context, code, exists_ok=True) + user_code_or_err = self._submit(context, code, exists_ok=True) # type: ignore return user_code_or_err @service_method(