
Compare commits


5 Commits

9 changed files with 121 additions and 48 deletions

.gitignore

@@ -202,4 +202,4 @@ tags
 [._]*.un~

 ### Project-specific
-dealwatch.yml
+ecommerce-exporter.yml

Dockerfile

@@ -2,4 +2,4 @@ FROM python:3.10
 COPY . /tmp/package
 RUN pip install --no-cache-dir /tmp/package && \
     rm -r /tmp/package
-ENTRYPOINT ["dealwatch"]
+ENTRYPOINT ["ecommerce-exporter"]

README.md

@@ -1,2 +1,2 @@
-# dealwatch
+# ecommerce-exporter

dealwatch/scrape_target.py

@ -1,36 +0,0 @@
import re
import httpx
import parsel
class ScrapeTarget:
def __init__(self, product_name, target_name, url, selector, regex=None):
self.product_name = product_name
self.target_name = target_name
self.url = url
self.selector = selector+'::text'
self.regex = re.compile(regex if regex else r'[0-9]+(\.[0-9]{2})?')
self.headers = {}
def query_target(self):
print('Query product %s, target %s' % (self.product_name, self.target_name))
# some sites get suspicious if we talk to them in HTTP/1.1
# we use httpx to have HTTP2 support and circumvent that issue
query_response = httpx.get(
url=self.url,
headers=self.headers,
follow_redirects=True,
).text
selector = parsel.Selector(text=query_response)
# Match the selector
selector_match = selector.css(self.selector).get()
if selector_match:
# Match the regex
regex_match = self.regex.search(selector_match)
if regex_match:
str_result = regex_match.group(0)
# Convert the reult to float
float_result = float(str_result)
return float_result
return None

dealwatch/cli.py → ecommerce_exporter/cli.py

@@ -1,10 +1,29 @@
 import argparse
+import os
+import time
 import yaml
-from prometheus_client import start_http_server
-from dealwatch.scrape_target import ScrapeTarget
+from httpx import RequestError
+from prometheus_client import start_http_server, Gauge, Counter
+from ecommerce_exporter.scrape_target import ScrapeError, ScrapeTarget
+
+ECOMMERCE_SCRAPE_TARGET_VALUE = Gauge(
+    'ecommerce_scrape_target_value',
+    'The value scraped from a scrape target',
+    ['product_name', 'target_name'],
+)
+ECOMMERCE_SCRAPE_TARGET_SUCCESS = Counter(
+    'ecommerce_scrape_target_success_total',
+    'The number of successful scrape and parse of a scrape target',
+    ['product_name', 'target_name'],
+)
+ECOMMERCE_SCRAPE_TARGET_FAILURE = Counter(
+    'ecommerce_scrape_target_failure_total',
+    'The number of failed scrape and parse of a scrape target',
+    ['product_name', 'target_name', 'exception'],
+)

 def main():
     parser = argparse.ArgumentParser("An utility to scrape e-commerce product price and expose them as prometheus metrics")

@@ -12,7 +31,13 @@ def main():
         '-c', '--config',
         help='The configuration file. (default: %(default)s)',
         type=str,
-        default='dealwatch.yml',
+        default='ecommerce-exporter.yml',
+    )
+    parser.add_argument(
+        '-i', '--interval',
+        help='The target scrape interval, in minutes. (default: %(default)s)',
+        type=float,
+        default=15,
     )
     parser.add_argument(
         '--user-agent',

@@ -34,7 +59,7 @@ def main():
     )
     args = parser.parse_args()

-    scrape_targets = parse_config(args.config)
+    scrape_targets = parse_config(os.path.abspath(args.config))

     # setup the headers for each scrape targets
     for scrape_target in scrape_targets:

@@ -46,8 +71,28 @@ def main():
     # start the http server to server the prometheus metrics
     start_http_server(args.listen_port, args.listen_address)

-    for scrape_target in scrape_targets:
-        print(scrape_target.query_target())
+    # start the main loop
+    while True:
+        for scrape_target in scrape_targets:
+            try:
+                print("Starting scrape. product: '%s', target: '%s'" % (scrape_target.product_name, scrape_target.target_name))
+                value = scrape_target.query_target()
+                ECOMMERCE_SCRAPE_TARGET_VALUE.labels(
+                    product_name=scrape_target.product_name,
+                    target_name=scrape_target.target_name,
+                ).set(value)
+                ECOMMERCE_SCRAPE_TARGET_SUCCESS.labels(
+                    product_name=scrape_target.product_name,
+                    target_name=scrape_target.target_name,
+                ).inc()
+            except (RequestError, ScrapeError) as e:
+                print("Failed to scrape! product: '%s', target: '%s', message: '%s'" % (scrape_target.product_name, scrape_target.target_name, e))
+                ECOMMERCE_SCRAPE_TARGET_FAILURE.labels(
+                    product_name=scrape_target.product_name,
+                    target_name=scrape_target.target_name,
+                    exception=e.__class__.__name__,
+                ).inc()
+        time.sleep(args.interval * 60)

 def parse_config(config_filename):
     result = []

@@ -66,10 +111,11 @@ def parse_config(config_filename):
             # Create a ScrapeTarget for each targets to scrape
             result.append(ScrapeTarget(
                 product_name=product_name,
-                target_name=get_field_or_die(target, 'name'),
                 url=get_field_or_die(target, 'url'),
                 selector=get_field_or_die(target, 'selector'),
+                target_name=target.get('name'),
                 regex=target.get('regex'),
+                parser=target.get('parser'),
             ))
     return result
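
parse_config leans on a get_field_or_die helper that never appears in this diff. A minimal sketch of the assumed behavior, just to make the hunk above self-explanatory; the actual implementation in the repo may differ:

def get_field_or_die(mapping, field_name):
    # assumed behavior: return the value of a required config field,
    # failing loudly when the config omits it
    value = mapping.get(field_name)
    if value is None:
        raise ValueError("Missing required field: '%s'" % field_name)
    return value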

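The same hunk also implies the rough shape of the renamed ecommerce-exporter.yml. The sketch below is inferred, not taken from the repository; the products/targets nesting and every value are assumptions based on the get_field_or_die and target.get calls:

# hypothetical ecommerce-exporter.yml
products:
  - name: example-product          # becomes the product_name label
    targets:
      - url: https://shop.example.com/product/1234  # required
        selector: '.price'         # required: CSS selector (html) or jq filter (json)
        name: shop.example.com     # optional: ScrapeTarget defaults to the url hostname
        regex: '[0-9]+\.[0-9]{2}'  # optional: defaults to a price-like pattern
        parser: html               # optional: 'html' (default) or 'json'
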
ecommerce_exporter/scrape_target.py

@@ -0,0 +1,62 @@
+import json
+import re
+from urllib.parse import urlparse
+
+import httpx
+import parsel
+import pyjq
+
+class ScrapeTarget:
+    def __init__(self, product_name, url, selector, target_name=None, regex=None, parser=None):
+        self.product_name = product_name
+        self.target_name = target_name if target_name else urlparse(url).hostname
+        self.url = url
+        self.selector = selector
+        self.regex = re.compile(regex if regex else r'[0-9]+(\.[0-9]{2})?')
+        self.parser = parser if parser else 'html'
+        self.headers = {}
+
+        # sanity check
+        valid_parsers = ('html', 'json')
+        if self.parser not in valid_parsers:
+            raise ValueError("Invalid parser configured (got '%s' but need one of %s) product: '%s', target: '%s'" % (self.parser, valid_parsers, self.product_name, self.target_name))
+
+    def query_target(self):
+        # some sites get suspicious if we talk to them in HTTP/1.1 (maybe because it doesn't match our user-agent?)
+        # we use httpx to have HTTP2 support and circumvent that issue
+        query_response = httpx.get(
+            url=self.url,
+            headers=self.headers,
+            follow_redirects=True,
+        ).text
+
+        # parse the response and match the selector
+        selector_match = ''
+        if self.parser == 'html':
+            # parse response as html
+            selector = parsel.Selector(text=query_response)
+            selector_match = selector.css(self.selector).get()
+        elif self.parser == 'json':
+            # parse response as json
+            query_response_json = json.loads(query_response)
+            selector_match = str(pyjq.first(self.selector, query_response_json))
+        else:
+            raise ScrapeError('Invalid parser!')
+
+        if not selector_match:
+            raise ScrapeError('Failed to match selector!')
+
+        # match the regex
+        regex_match = self.regex.search(selector_match)
+        if regex_match:
+            str_result = regex_match.group(0)
+            # convert the result to float
+            float_result = float(str_result)
+            return float_result
+        else:
+            raise ScrapeError('Failed to match regex!')
+
+class ScrapeError(Exception):
+    def __init__(self, msg):
+        super().__init__(msg)
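
For illustration, the new class can be exercised on its own. The URL and jq filter below are made up:

from ecommerce_exporter.scrape_target import ScrapeError, ScrapeTarget

# hypothetical target: pull a price out of a JSON API response
target = ScrapeTarget(
    product_name='example-product',
    url='https://api.example.com/products/1234',
    selector='.price.current',  # treated as a jq filter because parser='json'
    parser='json',
)
# target_name was omitted, so it defaults to the url hostname: 'api.example.com'
try:
    print(target.query_target())  # e.g. 19.99
except ScrapeError as e:
    print('scrape failed: %s' % e)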

setup.cfg

@@ -1,5 +1,5 @@
 [metadata]
-name = dealwatch
+name = ecommerce-exporter
 author = badjware
 author_email = marchambault.badjware.dev
 platform = any

@@ -13,10 +13,11 @@ install_requires=
     PyYAML~=6.0
     httpx~=0.23.0
     parsel~=1.6.0
+    pyjq~=2.6.0
     prometheus-client~=0.15.0

 [options.entry_points]
 console_scripts =
-    dealwatch = dealwatch.cli:main
+    ecommerce-exporter = ecommerce_exporter.cli:main

 [tool.setuptools_scm]
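
Once reinstalled, the renamed console script starts the exporter, and its /metrics endpoint serves the three metrics defined in cli.py in the Prometheus text format. An illustrative sample; the label values and numbers are made up:

# HELP ecommerce_scrape_target_value The value scraped from a scrape target
# TYPE ecommerce_scrape_target_value gauge
ecommerce_scrape_target_value{product_name="example-product",target_name="shop.example.com"} 19.99
# HELP ecommerce_scrape_target_success_total The number of successful scrape and parse of a scrape target
# TYPE ecommerce_scrape_target_success_total counter
ecommerce_scrape_target_success_total{product_name="example-product",target_name="shop.example.com"} 42.0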