json parser
parent 4a1f6d052c
commit bda9b43318
@@ -1,4 +1,5 @@
 import argparse
+import time
 
 import yaml
 
@@ -14,6 +15,12 @@ def main():
         type=str,
         default='dealwatch.yml',
     )
+    parser.add_argument(
+        '-i', '--interval',
+        help='The target scrape interval, in minutes. (default: %(default)s)',
+        type=float,
+        default=10,
+    )
     parser.add_argument(
         '--user-agent',
         help='The user-agent to spoof. (default: %(default)s)',
@@ -46,8 +53,11 @@ def main():
     # start the http server to server the prometheus metrics
     start_http_server(args.listen_port, args.listen_address)
 
-    for scrape_target in scrape_targets:
-        print(scrape_target.query_target())
+    # start the main loop
+    while True:
+        for scrape_target in scrape_targets:
+            print(scrape_target.query_target())
+        time.sleep(args.interval * 60)
 
 def parse_config(config_filename):
     result = []
@@ -70,6 +80,7 @@ def parse_config(config_filename):
             url=get_field_or_die(target, 'url'),
             selector=get_field_or_die(target, 'selector'),
             regex=target.get('regex'),
+            parser=target.get('parser'),
         ))
     return result
 
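As a side note on how the new field is consumed: a minimal sketch, assuming each target entry loaded from dealwatch.yml is a plain dict (the sample values below are hypothetical and not part of the commit). Required fields go through get_field_or_die, while the new parser key is read with dict.get, so an absent key yields None and ScrapeTarget falls back to its 'html' default:

    # sketch only: a hypothetical target entry as parse_config would see it
    target = {
        'url': 'https://example.com/product/123',
        'selector': '.price',
        # no 'regex' or 'parser' keys
    }
    print(target.get('regex'))   # None -> ScrapeTarget uses its default price regex
    print(target.get('parser'))  # None -> ScrapeTarget falls back to 'html'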
@@ -1,15 +1,19 @@
+from email import parser
+import json
 import re
 
 import httpx
 import parsel
+import pyjq
 
 class ScrapeTarget:
-    def __init__(self, product_name, target_name, url, selector, regex=None):
+    def __init__(self, product_name, target_name, url, selector, regex=None, parser=None):
         self.product_name = product_name
         self.target_name = target_name
         self.url = url
-        self.selector = selector+'::text'
+        self.selector = selector
         self.regex = re.compile(regex if regex else r'[0-9]+(\.[0-9]{2})?')
+        self.parser = parser if parser else 'html'
         self.headers = {}
 
     def query_target(self):
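A usage sketch of the updated constructor (the class and its defaults come from the hunk above; the product names, URLs, and selectors are invented for illustration). Because the automatic '::text' suffix was dropped from self.selector, an HTML target now spells it out, while a JSON target passes a jq expression:

    # sketch only: hypothetical targets exercising both parser modes
    html_target = ScrapeTarget(
        product_name='example-gpu',
        target_name='example-store',
        url='https://store.example.com/item/123',
        selector='.price::text',   # '::text' is now explicit
    )                              # parser defaults to 'html'

    json_target = ScrapeTarget(
        product_name='example-gpu',
        target_name='example-api',
        url='https://api.example.com/item/123',
        selector='.price',         # jq expression evaluated with pyjq
        parser='json',
    )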
@@ -21,16 +25,35 @@ class ScrapeTarget:
             headers=self.headers,
             follow_redirects=True,
         ).text
-        selector = parsel.Selector(text=query_response)
 
-        # Match the selector
-        selector_match = selector.css(self.selector).get()
-        if selector_match:
-            # Match the regex
-            regex_match = self.regex.search(selector_match)
-            if regex_match:
-                str_result = regex_match.group(0)
-                # Convert the reult to float
-                float_result = float(str_result)
-                return float_result
-        return None
+        # parse the response and match the selector
+        selector_match = ''
+        if self.parser == 'html':
+            # parse response as html
+            selector = parsel.Selector(text=query_response)
+            selector_match = selector.css(self.selector).get()
+        elif self.parser == 'json':
+            # parse response as json
+            query_response_json = json.loads(query_response)
+            selector_match = str(pyjq.first(self.selector, query_response_json))
+        else:
+            # TODO: better error handling
+            print('invalid parser!')
+            return None
+
+        if not selector_match:
+            # TODO: better error handling
+            print('no selector_match!')
+            return None
+
+        # match the regex
+        regex_match = self.regex.search(selector_match)
+        if regex_match:
+            str_result = regex_match.group(0)
+            # convert the result to float
+            float_result = float(str_result)
+            return float_result
+        else:
+            # TODO: better error handling
+            print('no regex match!')
+            return None
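For illustration, a self-contained sketch of what the new JSON branch does, using the same json.loads / pyjq.first / regex sequence as the hunk above on a hypothetical payload (the '.offer.price' selector and the response body are made up):

    import json
    import re

    import pyjq

    # hypothetical response body and jq selector mirroring the json branch
    query_response = '{"offer": {"price": "1299.99 USD"}}'
    selector = '.offer.price'

    query_response_json = json.loads(query_response)
    selector_match = str(pyjq.first(selector, query_response_json))  # '1299.99 USD'

    # same default regex as ScrapeTarget, then convert the match to float
    regex = re.compile(r'[0-9]+(\.[0-9]{2})?')
    regex_match = regex.search(selector_match)
    if regex_match:
        print(float(regex_match.group(0)))  # 1299.99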