Skip to content

Commit

Permalink
Merge branch 'main' of github.com:akanshi-elastic/connectors-python into remove-awscli-dependency
Browse files Browse the repository at this point in the history
  • Loading branch information
akanshi-elastic committed Jun 23, 2023
2 parents 0eaac4e + 67db216 commit fda6c62
Show file tree
Hide file tree
Showing 66 changed files with 5,136 additions and 1,181 deletions.
6 changes: 3 additions & 3 deletions .backportrc.json
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
{
"targetBranchChoices": [
{ "name": "main", "checked": true },
"8.8",
"8.7"
"8.9",
"8.8"
],
"fork": false,
"targetPRLabels": ["backport"],
"branchLabelMapping": {
"^v8.9.0(.0)?$": "main",
"^v8.10.0(.0)?$": "main",
"^v(\\d+).(\\d+)(.\\d+)+$": "$1.$2"
},
"upstream": "elastic/connectors-python"
Expand Down
11 changes: 7 additions & 4 deletions .buildkite/nightly_steps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@ steps:
- label: "🔨 Google Cloud Storage"
command:
- ".buildkite/run_nigthly.sh google_cloud_storage"
env:
SKIP_AARCH64: "true"
artifact_paths:
- "perf8-report-*/**/*"
- label: "🔨 Azure Blob Storage"
Expand All @@ -43,9 +41,9 @@ steps:
SKIP_AARCH64: "true"
artifact_paths:
- "perf8-report-*/**/*"
- label: "🔨 Sharepoint"
- label: "🔨 Sharepoint Server"
command:
- ".buildkite/run_nigthly.sh sharepoint extra_small"
- ".buildkite/run_nigthly.sh sharepoint_server extra_small"
artifact_paths:
- "perf8-report-*/**/*"
- label: "🔨 Microsoft SQL"
Expand All @@ -63,6 +61,11 @@ steps:
- ".buildkite/run_nigthly.sh confluence"
artifact_paths:
- "perf8-report-*/**/*"
- label: "🔨 ServiceNow"
command:
- ".buildkite/run_nigthly.sh servicenow"
artifact_paths:
- "perf8-report-*/**/*"
- label: "🔨 MongoDB"
command:
- ".buildkite/run_nigthly.sh mongodb"
Expand Down
8 changes: 8 additions & 0 deletions catalog-info.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,10 @@ spec:
branch: '8.8'
cronline: '@daily'
message: "Runs daily `8.8` e2e test"
Daily 8_9:
branch: '8.9'
cronline: '@daily'
message: "Runs daily `8.9` e2e test"
Daily main:
branch: main
cronline: '@daily'
Expand Down Expand Up @@ -180,6 +184,10 @@ spec:
branch: '8.8'
cronline: '@daily'
message: "Runs daily `8.8` e2e aarch64 test"
Daily 8_9:
branch: '8.9'
cronline: '@daily'
message: "Runs daily `8.9` e2e aarch64 test"
Daily main:
branch: main
cronline: '@daily'
Expand Down
10 changes: 3 additions & 7 deletions config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,6 @@ service:
connector_id: 'changeme'
service_type: 'changeme'

extraction_service:
host: http://localhost:8090
text_extraction:
use_file_pointers: false
method: 'tika'

sources:
mongodb: connectors.sources.mongo:MongoDataSource
s3: connectors.sources.s3:S3DataSource
Expand All @@ -46,8 +40,10 @@ sources:
azure_blob_storage: connectors.sources.azure_blob_storage:AzureBlobStorageDataSource
postgresql: connectors.sources.postgresql:PostgreSQLDataSource
oracle: connectors.sources.oracle:OracleDataSource
sharepoint: connectors.sources.sharepoint:SharepointDataSource
sharepoint_server: connectors.sources.sharepoint_server:SharepointServerDataSource
mssql: connectors.sources.mssql:MSSQLDataSource
jira: connectors.sources.jira:JiraDataSource
confluence: connectors.sources.confluence:ConfluenceDataSource
dropbox: connectors.sources.dropbox:DropboxDataSource
servicenow: connectors.sources.servicenow:ServiceNowDataSource
sharepoint_online: connectors.sources.sharepoint_online:SharepointOnlineDataSource
2 changes: 1 addition & 1 deletion connectors/VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
8.9.0.0
8.10.0.0
135 changes: 135 additions & 0 deletions connectors/es/document.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,86 @@
# or more contributor license agreements. Licensed under the Elastic License 2.0;
# you may not use this file except in compliance with the Elastic License 2.0.
#
from connectors.logger import logger


class InvalidDocumentSourceError(Exception):
    """Raised when an Elasticsearch document's ``_source`` is not of the expected type."""


class DocumentLogger:
    """Facade over the module-level ``logger`` that stamps every record
    with a fixed message ``prefix`` and ``extra`` fields.

    Exposes the standard ``logging.Logger`` level methods (``debug`` ..
    ``fatal``) plus ``isEnabledFor`` so it can stand in for a logger.
    """

    def __init__(self, prefix, extra):
        # prefix: prepended to messages (used when filebeat is not turned on)
        # extra: custom fields attached to ecs logging (when filebeat is on)
        self._prefix = prefix
        self._extra = extra

    def isEnabledFor(self, level):
        # Level checks are delegated straight to the underlying logger.
        return logger.isEnabledFor(level)

    def _forward(self, log_method, msg, args, kwargs):
        # Single funnel for every level: inject the bound prefix/extra.
        log_method(
            msg,
            *args,
            prefix=self._prefix,  # pyright: ignore
            extra=self._extra,
            **kwargs,
        )

    def debug(self, msg, *args, **kwargs):
        self._forward(logger.debug, msg, args, kwargs)

    def info(self, msg, *args, **kwargs):
        self._forward(logger.info, msg, args, kwargs)

    def warning(self, msg, *args, **kwargs):
        self._forward(logger.warning, msg, args, kwargs)

    def error(self, msg, *args, **kwargs):
        self._forward(logger.error, msg, args, kwargs)

    def exception(self, msg, *args, exc_info=True, **kwargs):
        # ``exc_info`` defaults to True so the active exception is recorded.
        kwargs["exc_info"] = exc_info
        self._forward(logger.exception, msg, args, kwargs)

    def critical(self, msg, *args, **kwargs):
        self._forward(logger.critical, msg, args, kwargs)

    def fatal(self, msg, *args, **kwargs):
        self._forward(logger.fatal, msg, args, kwargs)


class ESDocument:
"""
Represents a document in an Elasticsearch index.
Expand All @@ -30,6 +106,7 @@ def __init__(self, elastic_index, doc_source):
raise InvalidDocumentSourceError(
f"Invalid type found for source: {type(self._source).__name__}, expected: {dict.__name__}"
)
self.logger = DocumentLogger(prefix=self._prefix(), extra=self._extra())

def get(self, *keys, default=None):
value = self._source
Expand All @@ -46,3 +123,61 @@ async def reload(self):
self._seq_no = doc_source.get("_seq_no")
self._primary_term = doc_source.get("_primary_term")
self._source = doc_source.get("_source", {})

def log_debug(self, msg, *args, **kwargs):
    """Log *msg* at DEBUG level via this document's bound logger."""
    self.logger.debug(msg, *args, **kwargs)

def log_info(self, msg, *args, **kwargs):
    """Log *msg* at INFO level via this document's bound logger."""
    self.logger.info(msg, *args, **kwargs)

def log_warning(self, msg, *args, **kwargs):
    """Log *msg* at WARNING level via this document's bound logger."""
    self.logger.warning(msg, *args, **kwargs)

def log_error(self, msg, *args, **kwargs):
    """Log *msg* at ERROR level via this document's bound logger."""
    self.logger.error(msg, *args, **kwargs)

def log_exception(self, msg, *args, exc_info=True, **kwargs):
    """Log *msg* with exception info (on by default) via the bound logger."""
    self.logger.exception(msg, *args, exc_info=exc_info, **kwargs)

def log_critical(self, msg, *args, **kwargs):
    """Log *msg* at CRITICAL level via this document's bound logger."""
    self.logger.critical(msg, *args, **kwargs)

def log_fatal(self, msg, *args, **kwargs):
    """Log *msg* at FATAL level via this document's bound logger."""
    self.logger.fatal(msg, *args, **kwargs)

def _prefix(self):
"""Return a string which will be prefixed to the log message when filebeat is not turned on"""
return None

def _extra(self):
"""Return custom fields to be added to ecs logging when filebeat is turned on"""
return {}
Loading

0 comments on commit fda6c62

Please sign in to comment.