From 6a13728220cfce611899b2ef1e8db1ccef99e891 Mon Sep 17 00:00:00 2001
From: Massaki Archambault
Date: Wed, 9 Oct 2024 19:11:11 -0400
Subject: [PATCH] improve logging

---
 ecommerce_exporter/cli.py           | 21 ++++++++++++++-------
 ecommerce_exporter/scrape_target.py | 20 ++++++++++++++------
 setup.cfg                           |  2 +-
 3 files changed, 29 insertions(+), 14 deletions(-)

diff --git a/ecommerce_exporter/cli.py b/ecommerce_exporter/cli.py
index adb2a9a..1c1d580 100644
--- a/ecommerce_exporter/cli.py
+++ b/ecommerce_exporter/cli.py
@@ -1,12 +1,19 @@
 import argparse
 import os
 import time
+import logging
 
 import yaml
 from prometheus_client import start_http_server, Gauge, Counter
 
-from ecommerce_exporter.scrape_target import ScrapeError, ScrapeTarget
+from ecommerce_exporter.scrape_target import ScrapeTarget
+
+logging.basicConfig(
+    format=os.environ.get('LOG_FORMAT', '[%(asctime)s] [%(levelname)-8s] %(message)s'),
+    level=os.environ.get('LOG_LEVEL', 'INFO')
+)
+logger = logging.getLogger(__name__)
 
 ECOMMERCE_SCRAPE_TARGET_VALUE = Gauge(
     'ecommerce_scrape_target_value',
@@ -63,19 +70,19 @@ def main():
     # setup the headers for each scrape targets
     for scrape_target in scrape_targets:
         scrape_target.headers = {
-            'Accept': '*/*',
-            'User-Agent': args.user_agent,
+            'accept': '*/*',
+            'user-agent': args.user_agent,
         }
 
     # start the http server to server the prometheus metrics
-    print("serving metrics on http://%s:%s/metrics" % (args.listen_address, args.listen_port))
+    logger.info("serving metrics on http://%s:%s/metrics", args.listen_address, args.listen_port)
     start_http_server(args.listen_port, args.listen_address)
 
     # start the main loop
     while True:
         for scrape_target in scrape_targets:
             try:
-                print("Starting scrape. product: '%s', target '%s'" % (scrape_target.product_name, scrape_target.target_name))
+                logger.info("Starting scrape. product: '%s', target '%s'", scrape_target.product_name, scrape_target.target_name)
                 value = scrape_target.query_target()
                 ECOMMERCE_SCRAPE_TARGET_VALUE.labels(
                     product_name=scrape_target.product_name,
@@ -88,7 +95,7 @@ def main():
             except KeyboardInterrupt:
                 return
             except Exception as e:
-                print("Failed to scrape! product: '%s', target: '%s', message: '%s'" % (scrape_target.product_name, scrape_target.target_name, e))
+                logger.error("Failed to scrape! product: '%s', target: '%s', message: '%s'", scrape_target.product_name, scrape_target.target_name, e)
                 ECOMMERCE_SCRAPE_TARGET_FAILURE.labels(
                     product_name=scrape_target.product_name,
                     target_name=scrape_target.target_name,
@@ -98,7 +105,7 @@ def main():
 
 def parse_config(config_filename):
     result = []
-    print('Loading configurations from %s' % config_filename)
+    logger.info('Loading configurations from %s', config_filename)
     with open(config_filename, 'r') as f:
         config = yaml.safe_load(f)
 
diff --git a/ecommerce_exporter/scrape_target.py b/ecommerce_exporter/scrape_target.py
index ffce4e6..d29d7e7 100644
--- a/ecommerce_exporter/scrape_target.py
+++ b/ecommerce_exporter/scrape_target.py
@@ -6,7 +6,9 @@ from urllib.parse import urlparse
 import httpx
 import parsel
 import pyjq
+import logging
 
+logger = logging.getLogger(__name__)
 class ScrapeTarget:
     def __init__(self, product_name, url, selector, target_name=None, regex=None, parser=None):
         self.product_name = product_name
@@ -16,6 +18,10 @@ class ScrapeTarget:
         self.regex = re.compile(regex if regex else r'[0-9]+(\.[0-9]{2})?')
         self.parser = parser if parser else 'html'
         self.headers = {}
+        self.client = httpx.Client(
+            follow_redirects=True,
+            http2=True,
+        )
 
         # sanity check
         valid_parsers = ('html', 'json')
@@ -25,21 +31,23 @@ class ScrapeTarget:
     def query_target(self):
         # some sites get suspicious if we talk to them in HTTP/1.1 (maybe because it doesn't match our user-agent?)
         # we use httpx to have HTTP2 support and circumvent that issue
-        query_response = httpx.get(
-            url=self.url,
+        query_response = self.client.get(
+            self.url,
             headers=self.headers,
-            follow_redirects=True,
-        ).text
+        )
+        logger.info('Status: %s', query_response.status_code)
+        query_response_text = query_response.text
+        logger.debug('Response: %s', query_response_text)
 
         # parse the response and match the selector
         selector_match = ''
         if self.parser == 'html':
             # parse response as html
-            selector = parsel.Selector(text=query_response)
+            selector = parsel.Selector(text=query_response_text)
             selector_match = selector.css(self.selector).get()
         elif self.parser == 'json':
             # parse response as json
-            query_response_json = json.loads(query_response)
+            query_response_json = json.loads(query_response_text)
             selector_match = str(pyjq.first(self.selector, query_response_json))
         else:
             raise ScrapeError('Invalid parser!')
diff --git a/setup.cfg b/setup.cfg
index 35b0ba7..6aa03af 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -16,7 +16,7 @@ setup_requires = setuptools_scm
 install_requires=
     PyYAML~=6.0
-    httpx~=0.23.0
+    httpx[http2]~=0.23.0
    parsel~=1.6.0
     pyjq~=2.6.0
     prometheus-client~=0.15.0
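
For anyone trying the patch locally: below is a minimal standalone sketch of the env-var-driven logging setup this patch introduces in cli.py. The LOG_FORMAT/LOG_LEVEL variable names and the default format string are taken from the patch itself; the 'demo' logger name and the sample messages are only illustrative.

    import logging
    import os

    # Same pattern as the patch: both the format string and the level are
    # overridable through the environment, defaulting to a bracketed
    # timestamp/level prefix at the INFO level.
    logging.basicConfig(
        format=os.environ.get('LOG_FORMAT', '[%(asctime)s] [%(levelname)-8s] %(message)s'),
        level=os.environ.get('LOG_LEVEL', 'INFO'),
    )
    logger = logging.getLogger('demo')

    logger.info('visible at the default INFO level')
    logger.debug('only visible when LOG_LEVEL=DEBUG is exported')

Note that query_target() now logs the full response body at DEBUG, so exporting LOG_LEVEL=DEBUG will also dump the scraped payloads. The http2=True flag on the shared httpx.Client is why setup.cfg switches to the httpx[http2] extra (it pulls in the h2 package); if needed, query_response.http_version can be checked to confirm which protocol was actually negotiated.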