Compare commits
No commits in common. "4a1f6d052c420942eb629bb40039450a214733e6" and "858ce77d8253a29615f730f7ae98bc928384f0ef" have entirely different histories.
4a1f6d052c...858ce77d82
@@ -201,5 +201,3 @@ tags
 # Persistent undo
 [._]*.un~
 
-### Project-specific
-dealwatch.yml
@@ -1,5 +0,0 @@
-targets:
-  amazon.ca:
-    url: https://www.amazon.ca/Intel-i7-12700K-Desktop-Processor-Unlocked/dp/B09FXNVDBJ/?_encoding=UTF8&pd_rd_w=BXQyU&content-id=amzn1.sym.b09e9731-f0de-43db-b62a-8954bcec282c&pf_rd_p=b09e9731-f0de-43db-b62a-8954bcec282c&pf_rd_r=Z2HRQ8TYGA943PQFTW1Q&pd_rd_wg=AG2TD&pd_rd_r=e4766451-3584-4c4f-8235-bcd4a316909a&ref_=pd_gw_ci_mcx_mr_hp_atf_m
-    selector: .a-offscreen
-    regex: '[0-9]+(\.[0-9]{2})?'
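For context on what the removed config above drives: the '.a-offscreen' selector picks the price element out of the fetched page, and the regex keeps only the numeric part of its text. A minimal sketch of that extraction with parsel and re, using made-up HTML in place of the real Amazon markup (the '::text' suffix is what the project's ScrapeTarget appends to the configured selector):

import re

import parsel

# hypothetical stand-in for the real Amazon markup
html = '<span class="a-offscreen">$419.99</span>'

# '.a-offscreen::text' selects the text content of the price element
price_text = parsel.Selector(text=html).css('.a-offscreen::text').get()

# the configured regex keeps only the numeric part, e.g. '419.99'
match = re.search(r'[0-9]+(\.[0-9]{2})?', price_text)
print(float(match.group(0)))  # 419.99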
@ -1,84 +0,0 @@
|
||||||
import argparse
|
|
||||||
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
from prometheus_client import start_http_server
|
|
||||||
|
|
||||||
from dealwatch.scrape_target import ScrapeTarget
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = argparse.ArgumentParser("An utility to scrape e-commerce product price and expose them as prometheus metrics")
|
|
||||||
parser.add_argument(
|
|
||||||
'-c', '--config',
|
|
||||||
help='The configuration file. (default: %(default)s)',
|
|
||||||
type=str,
|
|
||||||
default='dealwatch.yml',
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
'--user-agent',
|
|
||||||
help='The user-agent to spoof. (default: %(default)s)',
|
|
||||||
type=str,
|
|
||||||
default='Mozilla/5.0 (X11; Linux x86_64; rv:106.0) Gecko/20100101 Firefox/106.0',
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
'-p', '--listen-port',
|
|
||||||
help='The listen port for the http server. (default: %(default)s)',
|
|
||||||
type=int,
|
|
||||||
default=8000,
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
'-a', '--listen-address',
|
|
||||||
help='The listen address for the http server. (default: %(default)s)',
|
|
||||||
type=str,
|
|
||||||
default='0.0.0.0',
|
|
||||||
)
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
|
||||||
scrape_targets = parse_config(args.config)
|
|
||||||
|
|
||||||
# setup the headers for each scrape targets
|
|
||||||
for scrape_target in scrape_targets:
|
|
||||||
scrape_target.headers = {
|
|
||||||
'Accept': '*/*',
|
|
||||||
'User-Agent': args.user_agent,
|
|
||||||
}
|
|
||||||
|
|
||||||
# start the http server to server the prometheus metrics
|
|
||||||
start_http_server(args.listen_port, args.listen_address)
|
|
||||||
|
|
||||||
for scrape_target in scrape_targets:
|
|
||||||
print(scrape_target.query_target())
|
|
||||||
|
|
||||||
def parse_config(config_filename):
|
|
||||||
result = []
|
|
||||||
print('Loading configurations from %s' % config_filename)
|
|
||||||
with open(config_filename, 'r') as f:
|
|
||||||
config = yaml.safe_load(f)
|
|
||||||
|
|
||||||
# iterate through products listed in the configuration
|
|
||||||
products = get_field_or_die(config, 'products')
|
|
||||||
for product in products:
|
|
||||||
product_name = get_field_or_die(product, 'name')
|
|
||||||
|
|
||||||
# iterate through the targets listed for each products in the configuration
|
|
||||||
targets = get_field_or_die(product, 'targets')
|
|
||||||
for target in targets:
|
|
||||||
# Create a ScrapeTarget for each targets to scrape
|
|
||||||
result.append(ScrapeTarget(
|
|
||||||
product_name=product_name,
|
|
||||||
target_name=get_field_or_die(target, 'name'),
|
|
||||||
url=get_field_or_die(target, 'url'),
|
|
||||||
selector=get_field_or_die(target, 'selector'),
|
|
||||||
regex=target.get('regex'),
|
|
||||||
))
|
|
||||||
return result
|
|
||||||
|
|
||||||
def get_field_or_die(mapping, field_name):
|
|
||||||
value = mapping.get(field_name)
|
|
||||||
if value is None:
|
|
||||||
raise Exception('Missing required field: %s' % field_name)
|
|
||||||
else:
|
|
||||||
return value
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
|
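Worth noting: the parse_config() removed above expects a top-level 'products' list, each entry carrying a 'name' and a 'targets' list whose items have 'name', 'url', 'selector' and an optional 'regex'. That is a different shape from the removed dealwatch.yml shown earlier. A hedged sketch of a config parse_config() would accept (all names and values below are hypothetical):

import yaml

# hypothetical config matching the schema parse_config() walks:
# products -> name, targets -> name/url/selector/regex
example = yaml.safe_load(r"""
products:
  - name: intel-i7-12700k
    targets:
      - name: amazon.ca
        url: https://example.com/product-page
        selector: .a-offscreen
        regex: '[0-9]+(\.[0-9]{2})?'
""")

for product in example['products']:
    for target in product['targets']:
        print(product['name'], target['name'], target['url'])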
@@ -0,0 +1,5 @@
+def main():
+    print("Hello world")
+
+if __name__ == '__main__':
+    main()
@ -1,36 +0,0 @@
|
||||||
import re
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
import parsel
|
|
||||||
|
|
||||||
class ScrapeTarget:
|
|
||||||
def __init__(self, product_name, target_name, url, selector, regex=None):
|
|
||||||
self.product_name = product_name
|
|
||||||
self.target_name = target_name
|
|
||||||
self.url = url
|
|
||||||
self.selector = selector+'::text'
|
|
||||||
self.regex = re.compile(regex if regex else r'[0-9]+(\.[0-9]{2})?')
|
|
||||||
self.headers = {}
|
|
||||||
|
|
||||||
def query_target(self):
|
|
||||||
print('Query product %s, target %s' % (self.product_name, self.target_name))
|
|
||||||
# some sites get suspicious if we talk to them in HTTP/1.1
|
|
||||||
# we use httpx to have HTTP2 support and circumvent that issue
|
|
||||||
query_response = httpx.get(
|
|
||||||
url=self.url,
|
|
||||||
headers=self.headers,
|
|
||||||
follow_redirects=True,
|
|
||||||
).text
|
|
||||||
selector = parsel.Selector(text=query_response)
|
|
||||||
|
|
||||||
# Match the selector
|
|
||||||
selector_match = selector.css(self.selector).get()
|
|
||||||
if selector_match:
|
|
||||||
# Match the regex
|
|
||||||
regex_match = self.regex.search(selector_match)
|
|
||||||
if regex_match:
|
|
||||||
str_result = regex_match.group(0)
|
|
||||||
# Convert the reult to float
|
|
||||||
float_result = float(str_result)
|
|
||||||
return float_result
|
|
||||||
return None
|
|
|
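One caveat about the removed query_target() above: its comment says httpx is used for HTTP/2 support, but a bare httpx.get() negotiates HTTP/1.1 by default. Actually enabling HTTP/2 needs the optional h2 dependency (pip install 'httpx[http2]') and an explicit client. A hedged sketch, with the function name and parameters being my own rather than the project's:

import httpx

def fetch_page(url, headers):
    # http2=True is what actually enables HTTP/2; it requires the 'h2' extra
    with httpx.Client(http2=True, headers=headers, follow_redirects=True) as client:
        response = client.get(url)
        response.raise_for_status()
        return response.text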
@@ -10,13 +10,10 @@ setup_requires =
     setuptools
     setuptools_scm
 install_requires=
-    PyYAML~=6.0
-    httpx~=0.23.0
-    parsel~=1.6.0
     prometheus-client~=0.15.0
 
 [options.entry_points]
 console_scripts =
-    dealwatch = dealwatch.cli:main
+    dealwatch = dealwatch.main:main
 
 [tool.setuptools_scm]
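setup.cfg keeps prometheus-client as a dependency, and the removed CLI already called start_http_server(), yet it only printed the scraped prices instead of registering a metric. A rough sketch, not the project's actual code, of how a scraped price could be exposed as a labelled Gauge (the metric and label names are made up):

from prometheus_client import Gauge, start_http_server

# hypothetical metric; the real project may name and label this differently
PRODUCT_PRICE = Gauge('product_price', 'Last scraped product price', ['product', 'target'])

def publish(product_name, target_name, price):
    # skip targets where scraping returned None
    if price is not None:
        PRODUCT_PRICE.labels(product=product_name, target=target_name).set(price)

# start_http_server(8000) exposes /metrics; publish() would be called after each scrape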