diff --git a/README.md b/README.md
index f36a8593a..60039ab21 100644
--- a/README.md
+++ b/README.md
@@ -66,6 +66,12 @@
[Supported-providers logo table; the HTML markup was lost in extraction. This hunk adds an "Amazon SQS" cell next to the AppDynamics cell.]
@@ -96,14 +102,14 @@
[Table cells for Checkmk, Cilium, and CloudWatch; markup lost in extraction.]
@@ -134,14 +140,14 @@
[Table cells for Elastic, GCP Monitoring, and Grafana; markup lost in extraction.]
@@ -172,12 +178,6 @@
[This hunk removes the OpenObserve cell from its old position, next to New Relic.]
@@ -218,6 +218,12 @@
[This hunk re-adds the OpenObserve cell at its new position, before the Site24x7 cell.]
@@ -248,14 +254,14 @@
[Table cells for UptimeKuma, VictoriaMetrics, and Zabbix; markup lost in extraction.]
diff --git a/docs/mint.json b/docs/mint.json
index 373398997..1ee51f5c0 100644
--- a/docs/mint.json
+++ b/docs/mint.json
@@ -111,6 +111,7 @@
         "group": "Supported Providers",
         "pages": [
           "providers/documentation/aks-provider",
+          "providers/documentation/amazonsqs-provider",
           "providers/documentation/appdynamics-provider",
           "providers/documentation/argocd-provider",
           "providers/documentation/auth0-provider",
diff --git a/docs/providers/documentation/amazonsqs-provider.mdx b/docs/providers/documentation/amazonsqs-provider.mdx
new file mode 100644
index 000000000..c636cf766
--- /dev/null
+++ b/docs/providers/documentation/amazonsqs-provider.mdx
@@ -0,0 +1,64 @@
+---
+title: "AmazonSQS Provider"
+sidebarTitle: "AmazonSQS Provider"
+description: "The AmazonSQS provider enables you to pull alerts from and push messages to an Amazon SQS queue."
+---
+
+## Overview
+
+The **AmazonSQS Provider** supports:
+
+- Consuming SQS messages as alerts
+- Notifying/pushing messages to an SQS queue
+
+## Authentication Parameters
+
+- **Access Key Id** (required): Access key ID generated from IAM.
+- **Secret Access Key** (required): The secret corresponding to the above key ID.
+- **Region Name** (required): The region of your data center, e.g. us-east-1, ap-south-1, etc.
+- **SQS Queue URL** (required): The URL of the SQS queue.
+
+## Scopes
+
+- **authenticated**: Mandatory for all operations; ensures the user is authenticated.
+- **sqs::read**: Mandatory for getting alerts; ensures the user can read from the queue.
+- **sqs::write**: Mandatory **only** for notifying/pushing messages to the queue; ensures the user can write to the queue.
+
+If you only want to grant read scope to your key/secret pair, use the AmazonSQSReadOnlyAccess permission policy.
+If you want to grant both read & write scope, use the AmazonSQSFullAccess permission policy.
+Both policies are prebuilt in AWS.
+
+## Inputs for AmazonSQS Action
+
+- `message`: str: Body/message of the notification.
+- `group_id`: str | None: Mandatory only if the queue is a FIFO queue; ignored for a standard queue.
+- `dedup_id`: str | None: Mandatory only if the queue is a FIFO queue; ignored for a standard queue.
+- `**kwargs`: dict | None: Additional key-value pairs, which are sent as MessageAttributes in the notification.
+
+## Output for AmazonSQS Action
+
+For more detail, see the [SQS send_message documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sqs/client/send_message.html#).
+
+```json
+{
+  "MD5OfMessageBody": "string",
+  "MD5OfMessageAttributes": "string",
+  "MD5OfMessageSystemAttributes": "string",
+  "MessageId": "string",
+  "SequenceNumber": "string"
+}
+```
+
+## Notes
+
+- When using the AmazonSQS action, if your queue is FIFO, it is **mandatory** to pass a dedup_id & group_id.
+- All the extra fields present in MessageAttributes are stored in alert.labels as a key-value dictionary.
+- You can set the following attributes on the SQS message and Keep will extract them and use them for the alert (see the producer sketch after this file's diff):
+  - name
+  - status: Possible values 'firing' | 'resolved' | 'acknowledged' | 'suppressed' | 'pending'; defaults to 'firing'.
+  - severity: Possible values 'critical' | 'high' | 'warning' | 'info' | 'low'; defaults to 'high'.
+  - description
+
+## Useful Links
+
+- [AmazonSQS Boto3 Examples](https://docs.aws.amazon.com/code-library/latest/ug/python_3_sqs_code_examples.html)
+- [Boto3 SQS Documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sqs.html)
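To illustrate the attribute-to-alert mapping described in the notes above, here is a minimal producer sketch using boto3. The queue URL, region, and attribute values are hypothetical, and credentials are assumed to come from the default AWS credential chain; a message shaped like this is what the provider consumes and turns into a Keep alert.

```python
import boto3

# Hypothetical queue URL and region; replace with your own.
QUEUE_URL = "https://sqs.us-east-1.amazonaws.com/123456789012/keep-alerts"

sqs = boto3.client("sqs", region_name="us-east-1")

# Every MessageAttribute is copied into alert.labels; "name", "status",
# "severity" and "description" are additionally mapped onto the alert fields,
# with unknown status/severity values falling back to firing/high.
sqs.send_message(
    QueueUrl=QUEUE_URL,
    MessageBody="Disk usage above 90% on db-1",
    MessageAttributes={
        "name": {"DataType": "String", "StringValue": "disk-usage-high"},
        "status": {"DataType": "String", "StringValue": "firing"},
        "severity": {"DataType": "String", "StringValue": "critical"},
        "description": {"DataType": "String", "StringValue": "Root volume of db-1 is almost full"},
    },
    # For a FIFO queue, MessageGroupId and MessageDeduplicationId would also be required.
)
```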
diff --git a/docs/providers/overview.mdx b/docs/providers/overview.mdx
index 30c9b523e..8d8b0d7de 100644
--- a/docs/providers/overview.mdx
+++ b/docs/providers/overview.mdx
@@ -116,6 +116,14 @@ By leveraging Keep Providers, users are able to deeply integrate Keep with the t
   }
 />
[This hunk adds a provider card for AmazonSQS to the providers overview grid; the card's JSX markup (title, href, icon) was lost in extraction.]
[The diff header and the opening of the new AmazonSQS provider module (imports, authentication configuration, class definition) were also lost in extraction; the surviving part of the implementation follows.]
+    def validate_scopes(self) -> dict[str, bool | str]:
+        # Validate the three scopes: STS confirms the credentials, then a test
+        # message exercises write access (and, on failure, read access).
+        self.logger.info("Validating user scopes for AmazonSQS provider")
+        scopes = {
+            "authenticated": False,
+            "sqs::read": False,
+            "sqs::write": False,
+        }
+        sts = boto3.client(
+            "sts",
+            region_name=self.authentication_config.region_name,
+            aws_access_key_id=self.authentication_config.access_key_id,
+            aws_secret_access_key=self.authentication_config.secret_access_key,
+        )
+        try:
+            sts.get_caller_identity()
+            self.logger.info(
+                "User identity fetched successfully, user is authenticated."
+            )
+            scopes["authenticated"] = True
+        except botocore.exceptions.ClientError as e:
+            self.logger.error(
+                "Error while getting user identity, authentication failed",
+                extra={"exception": str(e)},
+            )
+            scopes["authenticated"] = str(e)
+            return scopes
+
+        try:
+            self.__write_to_queue(
+                message="KEEP_SCOPE_TEST_MSG_PLEASE_IGNORE",
+                dedup_id=str(uuid.uuid4()),
+                group_id="keep",
+            )
+            self.logger.info("All scopes verified successfully")
+            scopes["sqs::write"] = True
+            scopes["sqs::read"] = True
+        except botocore.exceptions.ClientError as e:
+            self.logger.error(
+                "User does not have permission to write to SQS queue",
+                extra={"exception": str(e)},
+            )
+            scopes["sqs::write"] = str(e)
+            try:
+                self.__read_from_queue()
+                self.logger.info("User has permission to read from SQS Queue")
+                scopes["sqs::read"] = True
+            except botocore.exceptions.ClientError as e:
+                self.logger.error(
+                    "User does not have permission to read from SQS queue",
+                    extra={"exception": str(e)},
+                )
+                scopes["sqs::read"] = str(e)
+        return scopes
+
+    def __read_from_queue(self):
+        self.logger.info("Getting messages from SQS Queue")
+        try:
+            return self.__get_sqs_client.receive_message(
+                QueueUrl=self.authentication_config.sqs_queue_url,
+                MessageAttributeNames=["All"],
+                MessageSystemAttributeNames=["All"],
+                MaxNumberOfMessages=10,
+                WaitTimeSeconds=10,
+            )
+        except Exception as e:
+            self.logger.error(
+                "Error while reading from SQS Queue", extra={"exception": str(e)}
+            )
+
+    def __write_to_queue(self, message, group_id, dedup_id, **kwargs):
+        try:
+            self.logger.info("Sending message to SQS Queue")
+            message = str(message)
+            # Keep group_id/dedup_id as None when not supplied so the FIFO check below works.
+            group_id = str(group_id) if group_id is not None else None
+            dedup_id = str(dedup_id) if dedup_id is not None else None
+            is_fifo = self.authentication_config.sqs_queue_url.endswith(".fifo")
+            self.logger.info("Building MessageAttributes")
+            msg_attrs = {
+                key: {"StringValue": kwargs[key], "DataType": "String"}
+                for key in kwargs
+            }
+            if is_fifo:
+                if not dedup_id or not group_id:
+                    self.logger.error(
+                        "Mandatory to provide dedup_id (Message deduplication ID) & group_id (Message group ID) when pushing to fifo queue"
+                    )
+                    raise Exception(
+                        "Mandatory to provide dedup_id (Message deduplication ID) & group_id (Message group ID) when pushing to fifo queue"
+                    )
+                response = self.__get_sqs_client.send_message(
+                    QueueUrl=self.authentication_config.sqs_queue_url,
+                    MessageAttributes=msg_attrs,
+                    MessageBody=message,
+                    MessageDeduplicationId=dedup_id,
+                    MessageGroupId=group_id,
+                )
+            else:
+                response = self.__get_sqs_client.send_message(
+                    QueueUrl=self.authentication_config.sqs_queue_url,
+                    MessageAttributes=msg_attrs,
+                    MessageBody=message,
+                )
+
+            self.logger.info(
+                "Successfully pushed the message to SQS",
+                extra={"response": str(response)},
+            )
+            return response
+        except Exception as e:
+            self.logger.error(
+                "Error while writing to SQS queue", extra={"exception": str(e)}
+            )
+            raise e
+
+    def __delete_from_queue(self, receipt: str):
+        self.logger.info("Deleting message from SQS Queue")
+        try:
+            self.__get_sqs_client.delete_message(
+                QueueUrl=self.authentication_config.sqs_queue_url, ReceiptHandle=receipt
+            )
+            self.logger.info("Successfully deleted message from SQS Queue")
+        except Exception as e:
+            self.logger.error(
+                "Error while deleting message from SQS queue",
+                extra={"exception": str(e)},
+            )
+            raise e
+
+    @staticmethod
+    def get_status_or_default(status_value):
+        try:
+            # Check if status_value is a valid member of AlertStatus
+            return AlertStatus(status_value)
+        except ValueError:
+            # If not, return the default AlertStatus.FIRING
+            return AlertStatus.FIRING
+
+    def _notify(self, message, group_id, dedup_id, **kwargs):
+        return self.__write_to_queue(
+            message=message, group_id=group_id, dedup_id=dedup_id, **kwargs
+        )
+
+    def start_consume(self):
+        # Poll the queue until stop_consume() is called, turning each message
+        # into a Keep alert and deleting it from the queue once pushed.
+        self.consume = True
+        while self.consume:
+            response = self.__read_from_queue() or {}
+            messages = response.get("Messages", [])
+            if not messages:
+                self.logger.info("No messages found. Queue is empty!")
+
+            for message in messages:
+                try:
+                    labels = {}
+                    attrs = message.get("MessageAttributes", {})
+                    for msg_attr in attrs:
+                        labels[msg_attr.lower()] = attrs[msg_attr].get(
+                            "StringValue", attrs[msg_attr].get("BinaryValue", "")
+                        )
+
+                    alert_dict = {
+                        "id": message["MessageId"],
+                        "name": labels.get("name", message["Body"]),
+                        "description": labels.get("description", message["Body"]),
+                        "message": message["Body"],
+                        "status": AmazonsqsProvider.get_status_or_default(
+                            labels.get("status", "firing")
+                        ),
+                        "severity": self.alert_severity_dict.get(
+                            labels.get("severity", "high"), AlertSeverity.HIGH
+                        ),
+                        "lastReceived": datetime.fromtimestamp(
+                            float(message["Attributes"]["SentTimestamp"]) / 1000
+                        ).isoformat(),
+                        "firingStartTime": datetime.fromtimestamp(
+                            float(message["Attributes"]["SentTimestamp"]) / 1000
+                        ).isoformat(),
+                        "labels": labels,
+                        "source": ["amazonsqs"],
+                    }
+                    self._push_alert(alert_dict)
+                    self.__delete_from_queue(receipt=message["ReceiptHandle"])
+                except Exception as e:
+                    self.logger.error(f"Error processing message: {e}")
+
+            time.sleep(0.1)
+        self.logger.info("Consuming stopped")
+
+    def stop_consume(self):
+        self.consume = False
diff --git a/pyproject.toml b/pyproject.toml
index 67edc18de..73ee56ac1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "keep"
-version = "0.34.1"
+version = "0.34.2"
 description = "Alerting. for developers, by developers."
 authors = ["Keep Alerting LTD"]
 packages = [{include = "keep"}]
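The Scopes section of the new provider documentation above names two prebuilt AWS policies. As a supplement, here is a hedged sketch of attaching one of them to the IAM user whose access key/secret is configured in Keep. The user name is hypothetical, and the managed-policy ARNs are assumed to follow the standard `arn:aws:iam::aws:policy/<PolicyName>` form; verify them in your account before relying on this.

```python
import boto3

iam = boto3.client("iam")

# Hypothetical IAM user that owns the access key / secret configured in Keep.
USER_NAME = "keep-sqs-user"

# Per the provider docs: AmazonSQSReadOnlyAccess is enough for the sqs::read scope
# (consuming alerts); AmazonSQSFullAccess also covers sqs::write (the notify action).
POLICY_ARN = "arn:aws:iam::aws:policy/AmazonSQSFullAccess"  # or .../AmazonSQSReadOnlyAccess

iam.attach_user_policy(UserName=USER_NAME, PolicyArn=POLICY_ARN)
```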