chore: add metric grabber + celery runner

surtur 2020-08-11 14:25:00 +02:00
parent 927bdd03ce
commit fad6ab856a
Signed by: wanderer
GPG Key ID: 19CE1EC1D9E0486D
3 changed files with 77 additions and 0 deletions

7
app/celery.py Normal file

@@ -0,0 +1,7 @@
from __future__ import absolute_import

from celery import Celery

# broker: RabbitMQ on localhost as user 'cl' (vhost 'cl_vhost');
# task results travel back over AMQP via the rpc:// backend
app = Celery('app',
             broker='amqp://cl:cl1234@localhost/cl_vhost',
             backend='rpc://',
             include=['app.tasks'])
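
Note: include=['app.tasks'] points at a task module that this commit does not add. A minimal sketch of what app/tasks.py might look like, purely an assumption (the task name probe and its body are hypothetical):

# app/tasks.py (hypothetical; not part of this commit)
from __future__ import absolute_import

from app.celery import app


@app.task
def probe(url):
    # placeholder body; the real task presumably wraps the HEAD-request
    # logic from app/ultrametrics.py
    return url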

65
app/ultrametrics.py Normal file

@@ -0,0 +1,65 @@
import asyncio
import logging
import time
from threading import Thread
from typing import Optional, Sequence

import aiohttp

logging.basicConfig(format='%(levelname)s:%(message)s',
                    filename='example.log', filemode='w',
                    level=logging.INFO)

URLS = [
    'https://python.org',
    'https://google.com',
    'https://stackoverflow.com',
    'https://dotya.ml',
    'https://git.dotya.ml',
    'https://drone.dotya.ml',
    'https://netdata.dotya.ml',
    'https://tew0x00.dotya.ml',
    'https://speedtest.dotya.ml',
    'https://testdotya.dotya.ml',
    'https://ubuntu.com',
]


def start_background_loop(loop: asyncio.AbstractEventLoop) -> None:
    # runs in a daemon thread; gives the main thread a loop to submit to
    asyncio.set_event_loop(loop)
    loop.run_forever()


async def fetch(url: str, session: Optional[aiohttp.ClientSession] = None) -> str:
    async def _fetch(url: str, session: aiohttp.ClientSession) -> str:
        s = time.time()
        # a HEAD request is enough to record status code and latency
        async with session.head(url) as response:
            f = time.time() - s
            m = '[ %f ]\t<%s>\ttook: %f\t(%s)' % (time.time(), response.status, f, url)
            print(m)
            logging.info(m)
            return url

    if session:
        return await _fetch(url, session)
    # no session supplied: open a throwaway one for this single request
    async with aiohttp.ClientSession() as session:
        return await _fetch(url, session)


async def fetch_urls(loop: asyncio.AbstractEventLoop) -> Sequence[str]:
    # probe all URLs concurrently over a single shared session
    async with aiohttp.ClientSession() as session:
        tasks = [loop.create_task(fetch(url, session)) for url in URLS]
        return await asyncio.gather(*tasks)


def main() -> None:
    loop = asyncio.new_event_loop()
    t = Thread(target=start_background_loop, args=(loop,), daemon=True)
    t.start()
    while True:
        # fire off a batch every 10 seconds; results land in the log
        asyncio.run_coroutine_threadsafe(fetch_urls(loop), loop)
        time.sleep(10)


if __name__ == '__main__':
    main()
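
For a quick one-shot local test without the background thread (not part of the commit; just a usage sketch), the coroutine can be driven directly on a temporary event loop:

# drive fetch_urls() once and print the probed URLs
import asyncio
from app.ultrametrics import fetch_urls

loop = asyncio.new_event_loop()
try:
    print(loop.run_until_complete(fetch_urls(loop)))
finally:
    loop.close()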

5
nurun.sh Normal file

@@ -0,0 +1,5 @@
#!/bin/bash
# start a Celery worker for the app package, give the broker connection
# a few seconds to settle, then launch the task runner alongside it
celery worker -A app.celery --loglevel=info &
sleep 5
python -m app.run_tasks &
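
nurun.sh launches app.run_tasks, which this commit also does not include. A guess at its minimal shape, reusing the hypothetical probe task sketched above (every name beyond URLS is an assumption):

# app/run_tasks.py (hypothetical; not part of this commit)
from __future__ import absolute_import

from app.tasks import probe
from app.ultrametrics import URLS

if __name__ == '__main__':
    # enqueue one HEAD probe per URL; the worker picks them up via RabbitMQ
    for url in URLS:
        probe.delay(url)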