
fix: not crash when can't connect to mongoshake url (#7)
emdneto authored Mar 30, 2024
1 parent a5608a7 commit 2342161
Showing 2 changed files with 17 additions and 14 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -35,8 +35,8 @@ More information about the metrics that Alibaba MongoShake exports on its endpoi…

To use the MongoShake Prometheus Exporter, attention must be paid to two mandatory environment variables:

- - `URL_LIST`: A list of URLs to be queried by the MongoShake Prometheus Exporter including the access port and path.
- - `SCRAPE_INTERVAL`: The time interval at which the MongoShake Prometheus Exporter will query the URLs.
+ - `MONGOSHAKE_SCRAPE_URL`: A list of URLs to be queried by the MongoShake Prometheus Exporter including the access port and path.
+ - `MONGOSHAKE_SCRAPE_INTERVAL`: The time interval at which the MongoShake Prometheus Exporter will query the URLs.
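As an aside (not part of this commit), a minimal sketch of how a comma-separated `MONGOSHAKE_SCRAPE_URL` value resolves into individual scrape targets, assuming the defaults added in `src/app.py` below; the whitespace trimming here is an illustrative extra, not what the exporter does:

```python
import os

# Illustrative parsing of the two variables, using the defaults from src/app.py
raw = os.environ.get("MONGOSHAKE_SCRAPE_URL", "http://localhost:9100/repl")
urls = [u.strip() for u in raw.split(",") if u.strip()]  # trimming is an illustrative extra
interval = int(os.environ.get("MONGOSHAKE_SCRAPE_INTERVAL", 10))
print(urls, interval)  # ['http://localhost:9100/repl'] 10
```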

### Example

@@ -97,7 +97,7 @@ docker pull ghcr.io/osgurisdosre/mongoshake-prometheus-exporter:main

Run:
```bash
-docker run -p 8000:8000 -p 9100:9100 -p 9200:9200 -e SCRAPE_INTERVAL="5" -e URL_LIST="http://host.docker.internal:9100/repl,http://host.docker.internal:9200/repl" ghcr.io/osgurisdosre/mongoshake-prometheus-exporter:main
+docker run -p 8000:8000 -p 9100:9100 -p 9200:9200 -e MONGOSHAKE_SCRAPE_INTERVAL="5" -e MONGOSHAKE_SCRAPE_URL="http://host.docker.internal:9100/repl,http://host.docker.internal:9200/repl" ghcr.io/osgurisdosre/mongoshake-prometheus-exporter:main
```

In the command above, we map ports `9100` and `9200` to the container in addition to publishing port `8000` from the container to the host. Because we want the container to resolve a `localhost` URL against the host rather than the container itself, we use `host.docker.internal` in the URLs.
25 changes: 14 additions & 11 deletions src/app.py
@@ -11,9 +11,9 @@
prometheus_client.REGISTRY.unregister(prometheus_client.GC_COLLECTOR)

# List of URLs to scrape
-URL_LIST=os.environ["URL_LIST"].split(",")
+MONGOSHAKE_SCRAPE_URL=os.environ.get("MONGOSHAKE_SCRAPE_URL", "http://localhost:9100/repl").split(',')
# Scrape interval
-SCRAPE_INTERVAL=int(os.environ["SCRAPE_INTERVAL"])
+MONGOSHAKE_SCRAPE_INTERVAL=int(os.environ.get("MONGOSHAKE_SCRAPE_INTERVAL", 10))

# Prometheus metric names
metric_prefix = "mongoshake"
@@ -27,13 +27,16 @@

# Fetch url data
async def fetch_metrics(url, prom_metrics):
-    async with aiohttp.ClientSession() as session:
-        async with session.get(url, headers={"Accept": "application/json"}) as response:
-            if response.status == 200:
-                data = await response.json(content_type=None)
-                update_prometheus_metrics(data, prom_metrics, url)
-            else:
-                print(f"Failed to fetch data from {url}: {response.status}")
+    try:
+        async with aiohttp.ClientSession() as session:
+            async with session.get(url, headers={"Accept": "application/json"}) as response:
+                if response.status == 200:
+                    data = await response.json(content_type=None)
+                    update_prometheus_metrics(data, prom_metrics, url)
+                else:
+                    print(f"Failed to fetch data from {url}: {response.status}")
+    except Exception as err:
+        print(err, url)
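
To illustrate why this guard matters, here is a minimal standalone sketch (endpoint assumed unreachable): with the `try`/`except`, a refused connection is printed and the coroutine returns normally, instead of raising out of `asyncio.gather` and killing the scrape loop.

```python
import asyncio
import aiohttp

async def fetch(url):
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                print(url, response.status)
    except Exception as err:
        # e.g. a ClientConnectorError when nothing is listening on the port
        print(err, url)

# Assuming nothing listens on localhost:9100, this prints the error and exits cleanly
asyncio.run(fetch("http://localhost:9100/repl"))
```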

# Print metrics in webserver
def update_prometheus_metrics(data, prom_metrics, url):
@@ -55,10 +58,10 @@ async def main():

# Start app
while True:
-        await asyncio.gather(*[fetch_metrics(url, prom_metrics) for url in URL_LIST])
+        await asyncio.gather(*[fetch_metrics(url, prom_metrics) for url in MONGOSHAKE_SCRAPE_URL])

        # Wait for the scrape interval
-        await asyncio.sleep(SCRAPE_INTERVAL)
+        await asyncio.sleep(MONGOSHAKE_SCRAPE_INTERVAL)

if __name__ == "__main__":
try:
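A note on the loop above: `asyncio.gather` waits for every fetch to finish before the sleep starts, so the effective scrape period is the slowest fetch plus `MONGOSHAKE_SCRAPE_INTERVAL`. A minimal sketch of the same pattern, with stand-in timings:

```python
import asyncio
import time

async def fake_fetch(url, delay):
    await asyncio.sleep(delay)  # stands in for the HTTP round trip
    print(f"fetched {url}")

async def main():
    start = time.monotonic()
    # All fetches run concurrently; the loop resumes when the slowest one finishes
    await asyncio.gather(fake_fetch("a", 0.2), fake_fetch("b", 0.5))
    await asyncio.sleep(1)  # stands in for MONGOSHAKE_SCRAPE_INTERVAL
    print(f"one iteration took {time.monotonic() - start:.1f}s")  # ~1.5s

asyncio.run(main())
```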
