Compare commits

2 Commits

Author SHA1 Message Date
Massaki Archambault 4a1f6d052c setup scrapper 2022-10-30 00:47:57 -04:00
Massaki Archambault 687ac4317d configuration file parsing 2022-10-29 23:35:36 -04:00
6 changed files with 131 additions and 6 deletions

.gitignore (vendored): 2 additions

@@ -201,3 +201,5 @@ tags
 # Persistent undo
 [._]*.un~
+### Project-specific
+dealwatch.yml

dealwatch.example.yml (new file): 5 additions

@@ -0,0 +1,5 @@
targets:
  amazon.ca:
    url: https://www.amazon.ca/Intel-i7-12700K-Desktop-Processor-Unlocked/dp/B09FXNVDBJ/?_encoding=UTF8&pd_rd_w=BXQyU&content-id=amzn1.sym.b09e9731-f0de-43db-b62a-8954bcec282c&pf_rd_p=b09e9731-f0de-43db-b62a-8954bcec282c&pf_rd_r=Z2HRQ8TYGA943PQFTW1Q&pd_rd_wg=AG2TD&pd_rd_r=e4766451-3584-4c4f-8235-bcd4a316909a&ref_=pd_gw_ci_mcx_mr_hp_atf_m
    selector: .a-offscreen
    regex: '[0-9]+(\.[0-9]{2})?'
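Note: the schema in dealwatch.example.yml above does not match what parse_config() in dealwatch/cli.py (below) expects; the parser looks for a top-level products list, where each product has a name and a list of targets. A sketch of a config that parse_config() would accept, with hypothetical product and target names and a placeholder URL:

products:
  - name: intel-i7-12700k          # hypothetical product name
    targets:
      - name: amazon.ca            # hypothetical target name
        url: https://example.com/some-product-page   # placeholder URL
        selector: .a-offscreen
        regex: '[0-9]+(\.[0-9]{2})?'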

dealwatch/cli.py (new file): 84 additions

@@ -0,0 +1,84 @@
import argparse

import yaml
from prometheus_client import start_http_server

from dealwatch.scrape_target import ScrapeTarget


def main():
    parser = argparse.ArgumentParser(
        description='A utility to scrape e-commerce product prices and expose them as Prometheus metrics',
    )
    parser.add_argument(
        '-c', '--config',
        help='The configuration file. (default: %(default)s)',
        type=str,
        default='dealwatch.yml',
    )
    parser.add_argument(
        '--user-agent',
        help='The user-agent to spoof. (default: %(default)s)',
        type=str,
        default='Mozilla/5.0 (X11; Linux x86_64; rv:106.0) Gecko/20100101 Firefox/106.0',
    )
    parser.add_argument(
        '-p', '--listen-port',
        help='The listen port for the http server. (default: %(default)s)',
        type=int,
        default=8000,
    )
    parser.add_argument(
        '-a', '--listen-address',
        help='The listen address for the http server. (default: %(default)s)',
        type=str,
        default='0.0.0.0',
    )
    args = parser.parse_args()

    scrape_targets = parse_config(args.config)

    # set up the headers for each scrape target
    for scrape_target in scrape_targets:
        scrape_target.headers = {
            'Accept': '*/*',
            'User-Agent': args.user_agent,
        }

    # start the http server to serve the prometheus metrics
    start_http_server(args.listen_port, args.listen_address)

    for scrape_target in scrape_targets:
        print(scrape_target.query_target())


def parse_config(config_filename):
    result = []
    print('Loading configuration from %s' % config_filename)
    with open(config_filename, 'r') as f:
        config = yaml.safe_load(f)

    # iterate through the products listed in the configuration
    products = get_field_or_die(config, 'products')
    for product in products:
        product_name = get_field_or_die(product, 'name')

        # iterate through the targets listed for each product in the configuration
        targets = get_field_or_die(product, 'targets')
        for target in targets:
            # create a ScrapeTarget for each target to scrape
            result.append(ScrapeTarget(
                product_name=product_name,
                target_name=get_field_or_die(target, 'name'),
                url=get_field_or_die(target, 'url'),
                selector=get_field_or_die(target, 'selector'),
                regex=target.get('regex'),
            ))
    return result


def get_field_or_die(mapping, field_name):
    value = mapping.get(field_name)
    if value is None:
        raise Exception('Missing required field: %s' % field_name)
    else:
        return value


if __name__ == '__main__':
    main()
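For now main() only prints each query_target() result and then exits; a minimal sketch, not part of the commit, of how the prices could instead be published through the Prometheus server started above (the metric name dealwatch_product_price and the serve_metrics() helper are hypothetical):

import time

from prometheus_client import Gauge, start_http_server


def serve_metrics(scrape_targets, listen_port, listen_address, interval=300):
    # hypothetical metric name; one time series per (product, target) pair
    price_gauge = Gauge(
        'dealwatch_product_price',
        'Last price scraped for a product on a given target',
        ['product', 'target'],
    )
    start_http_server(listen_port, listen_address)
    while True:
        for scrape_target in scrape_targets:
            price = scrape_target.query_target()
            if price is not None:
                price_gauge.labels(
                    product=scrape_target.product_name,
                    target=scrape_target.target_name,
                ).set(price)
        time.sleep(interval)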

dealwatch/main.py (deleted file): 5 deletions

@@ -1,5 +0,0 @@
def main():
    print("Hello world")

if __name__ == '__main__':
    main()

dealwatch/scrape_target.py (new file): 36 additions

@@ -0,0 +1,36 @@
import re

import httpx
import parsel


class ScrapeTarget:
    def __init__(self, product_name, target_name, url, selector, regex=None):
        self.product_name = product_name
        self.target_name = target_name
        self.url = url
        self.selector = selector+'::text'
        self.regex = re.compile(regex if regex else r'[0-9]+(\.[0-9]{2})?')
        self.headers = {}

    def query_target(self):
        print('Query product %s, target %s' % (self.product_name, self.target_name))
        # some sites get suspicious if we talk to them in HTTP/1.1
        # we use httpx to have HTTP2 support and circumvent that issue
        query_response = httpx.get(
            url=self.url,
            headers=self.headers,
            follow_redirects=True,
        ).text
        selector = parsel.Selector(text=query_response)

        # Match the selector
        selector_match = selector.css(self.selector).get()
        if selector_match:
            # Match the regex
            regex_match = self.regex.search(selector_match)
            if regex_match:
                str_result = regex_match.group(0)
                # Convert the result to float
                float_result = float(str_result)
                return float_result
        return None
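A note on the HTTP/2 comment in query_target(): httpx.get() on its own negotiates HTTP/1.1 only; HTTP/2 requires a Client created with http2=True plus the optional h2 dependency (installed via the httpx[http2] extra). A sketch of what that could look like (fetch_page() is a hypothetical helper, not part of the diff):

import httpx


def fetch_page(url, headers):
    # http2=True only takes effect when the 'h2' package is installed (httpx[http2])
    with httpx.Client(http2=True, headers=headers, follow_redirects=True) as client:
        response = client.get(url)
        return response.text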

setup.cfg: 4 additions and 1 deletion

@@ -10,10 +10,13 @@ setup_requires =
     setuptools
     setuptools_scm
 install_requires=
+    PyYAML~=6.0
+    httpx~=0.23.0
+    parsel~=1.6.0
     prometheus-client~=0.15.0

 [options.entry_points]
 console_scripts =
-    dealwatch = dealwatch.main:main
+    dealwatch = dealwatch.cli:main

 [tool.setuptools_scm]
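If the HTTP/2 approach sketched above were adopted, the httpx requirement would presumably need the optional extra, e.g. httpx[http2]~=0.23.0, to pull in the h2 package. With the console_scripts entry point now pointing at dealwatch.cli:main, installing the package (for example with pip install -e .) should expose a dealwatch command accepting the flags defined in dealwatch/cli.py above, e.g. dealwatch -c dealwatch.yml --listen-port 8000.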