diff --git a/README.md b/README.md
index 5a151a7..e2f3018 100644
--- a/README.md
+++ b/README.md
@@ -35,8 +35,8 @@ More information about the metrics that Alibaba MongoShake exports on its endpoi
 
 To use the MongoShake Prometheus Exporter, attention must be paid to two mandatory environment variables:
 
-- `URL_LIST`: A list of URLs to be queried by the MongoShake Prometheus Exporter including the access port and path.
-- `SCRAPE_INTERVAL`: The time interval at which the MongoShake Prometheus Exporter will query the URLs.
+- `MONGOSHAKE_SCRAPE_URL`: A list of URLs to be queried by the MongoShake Prometheus Exporter including the access port and path.
+- `MONGOSHAKE_SCRAPE_INTERVAL`: The time interval at which the MongoShake Prometheus Exporter will query the URLs.
 
 ### Example
 
@@ -97,7 +97,7 @@ docker pull ghcr.io/osgurisdosre/mongoshake-prometheus-exporter:main
 Run:
 
 ```bash
-docker run -p 8000:8000 -p 9100:9100 -p 9200:9200 -e SCRAPE_INTERVAL="5" -e URL_LIST="http://host.docker.internal:9100/repl,http://host.docker.internal:9200/repl" ghcr.io/osgurisdosre/mongoshake-prometheus-exporter:main
+docker run -p 8000:8000 -p 9100:9100 -p 9200:9200 -e MONGOSHAKE_SCRAPE_INTERVAL="5" -e MONGOSHAKE_SCRAPE_URL="http://host.docker.internal:9100/repl,http://host.docker.internal:9200/repl" ghcr.io/osgurisdosre/mongoshake-prometheus-exporter:main
 ```
 
 In the command above, we map ports `9100` and `9200` to the container in addition to publishing the container's port `8000` to the host. Because we want the container to resolve the URLs against the host's localhost rather than its own, we use `host.docker.internal`.
diff --git a/src/app.py b/src/app.py
index efa3e66..b8904de 100644
--- a/src/app.py
+++ b/src/app.py
@@ -11,9 +11,9 @@
 prometheus_client.REGISTRY.unregister(prometheus_client.GC_COLLECTOR)
 
 # List of URLs to scrape
-URL_LIST=os.environ["URL_LIST"].split(",")
+MONGOSHAKE_SCRAPE_URL=os.environ.get("MONGOSHAKE_SCRAPE_URL", "http://localhost:9100/repl").split(",")
 # Scrape interval
-SCRAPE_INTERVAL=int(os.environ["SCRAPE_INTERVAL"])
+MONGOSHAKE_SCRAPE_INTERVAL=int(os.environ.get("MONGOSHAKE_SCRAPE_INTERVAL", 10))
 
 # Prometheus metric names
 metric_prefix = "mongoshake"
@@ -27,13 +27,16 @@
 
 # Fetch url data
 async def fetch_metrics(url, prom_metrics):
-    async with aiohttp.ClientSession() as session:
-        async with session.get(url, headers={"Accept": "application/json"}) as response:
-            if response.status == 200:
-                data = await response.json(content_type=None)
-                update_prometheus_metrics(data, prom_metrics, url)
-            else:
-                print(f"Failed to fetch data from {url}: {response.status}")
+    try:
+        async with aiohttp.ClientSession() as session:
+            async with session.get(url, headers={"Accept": "application/json"}) as response:
+                if response.status == 200:
+                    data = await response.json(content_type=None)
+                    update_prometheus_metrics(data, prom_metrics, url)
+                else:
+                    print(f"Failed to fetch data from {url}: {response.status}")
+    except Exception as err:
+        print(err, url)
 
 # Print metrics in webserver
 def update_prometheus_metrics(data, prom_metrics, url):
@@ -55,10 +58,10 @@ async def main():
 
     # Start app
     while True:
-        await asyncio.gather(*[fetch_metrics(url, prom_metrics) for url in URL_LIST])
+        await asyncio.gather(*[fetch_metrics(url, prom_metrics) for url in MONGOSHAKE_SCRAPE_URL])
 
         # Wait for the scrape interval
-        await asyncio.sleep(SCRAPE_INTERVAL)
+        await asyncio.sleep(MONGOSHAKE_SCRAPE_INTERVAL)
 
 if __name__ == "__main__":
     try:
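A quick way to sanity-check the new behavior outside the container: the sketch below mirrors the patched configuration block in `src/app.py` (both variables now fall back to defaults via `os.environ.get`, so they are effectively optional rather than mandatory) and the new error handling in `fetch_metrics`. This is a minimal standalone sketch, assuming `aiohttp` is installed; the `fetch_json` helper and the single-pass `main` are illustrative names, not part of the patch.

```python
import asyncio
import os

import aiohttp

# Mirrors the patched defaults: a single local endpoint and a 10-second
# interval are used when the environment variables are unset.
MONGOSHAKE_SCRAPE_URL = os.environ.get(
    "MONGOSHAKE_SCRAPE_URL", "http://localhost:9100/repl"
).split(",")
MONGOSHAKE_SCRAPE_INTERVAL = int(os.environ.get("MONGOSHAKE_SCRAPE_INTERVAL", 10))


async def fetch_json(url):
    # Same pattern as the patched fetch_metrics: connection errors are caught
    # and logged, so one unreachable endpoint cannot crash the scrape loop.
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers={"Accept": "application/json"}) as response:
                if response.status == 200:
                    return await response.json(content_type=None)
                print(f"Failed to fetch data from {url}: {response.status}")
    except Exception as err:
        print(err, url)
    return None


async def main():
    # One scrape pass over all configured endpoints, then one interval's wait.
    results = await asyncio.gather(*[fetch_json(url) for url in MONGOSHAKE_SCRAPE_URL])
    print(results)
    await asyncio.sleep(MONGOSHAKE_SCRAPE_INTERVAL)


if __name__ == "__main__":
    asyncio.run(main())
```

With no environment variables set, this prints `[None, ...]` plus an error for each unreachable endpoint instead of raising a `KeyError` at startup, which is the behavioral change the patch makes to the exporter.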