Added TG bot code

itqop 2024-02-28 01:27:42 +03:00
parent dbfb6ed9fd
commit 3cc34d3af0
9 changed files with 166 additions and 33 deletions

app.py (47 changes)

@@ -1,30 +1,23 @@
-from app import MongoDB, aggregate_salaries, Settings
+from app import configs, configure_logger, router
 import asyncio
-import json
+from aiogram import Bot, Dispatcher
+from aiogram.enums.parse_mode import ParseMode
+from aiogram.fsm.storage.memory import MemoryStorage
+from loguru import logger

-settings = Settings()
-client = MongoDB(str(settings.DB_URI))
-db = client.client[settings.DATABASE_NAME]
-collection = db[settings.COLLECTION_NAME]

-json_str = '''
-{
-    "dt_from": "2022-09-01T00:00:00",
-    "dt_upto": "2022-12-31T23:59:00",
-    "group_type": "month"
-}
-'''
+def register_logger():
+    configure_logger(capture_exceptions=True)
+    logger.info("Success logger register")

-async def main(json_str):
-    data = json.loads(json_str)
-    dt_from = data["dt_from"]
-    dt_upto = data["dt_upto"]
-    group_type = data["group_type"]
-    result = await aggregate_salaries(collection, dt_from, dt_upto, group_type)
-    print(result['dataset'], len(result['dataset']))
-    print(result['labels'], len(result['labels']))
+async def start_app():
+    bot = Bot(token=configs.API_TOKEN_TG)
+    dp = Dispatcher(storage=MemoryStorage())
+    dp.include_router(router)
+    logger.info("Starting bot..")
+    await dp.start_polling(bot, allowed_updates=dp.resolve_used_update_types())

-asyncio.run(main(json_str))
+if __name__ == "__main__":
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(start_app())
+    loop.run_forever()
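One note on the new entry point: in aiogram 3, dp.start_polling() only returns once polling stops, so loop.run_forever() after run_until_complete() adds nothing, and this hunk never shows register_logger() being called. A minimal alternative tail for app.py, my sketch rather than part of the commit, assuming the logger is meant to be configured before polling starts:

# Sketch only: a possible replacement for the committed __main__ block of app.py.
if __name__ == "__main__":
    register_logger()            # set up loguru sinks before anything logs
    asyncio.run(start_app())     # start_polling() blocks until the bot is stopped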

app/__init__.py (file name not shown in this view; inferred from the imports used in app.py)

@@ -1,2 +1,3 @@
-from app.config import Settings
-from app.database import MongoDB, aggregate_salaries
+from app.config import configs
+from app.logger import configure_logger
+from app.handlers import router

app/config.py (file name not shown in this view; inferred from the import paths)

@@ -3,7 +3,7 @@ from pydantic import computed_field, MongoDsn
 from pydantic_core import Url

-class Settings(BaseSettings):
+class Configs(BaseSettings):
     API_TOKEN_TG: str
     HOST_MONGODB: str
     DATABASE_NAME: str
@@ -17,4 +17,6 @@ class Settings(BaseSettings):
             f"mongodb+srv://{self.USERNAME_MONGO}:{self.PASSWORD_MONGO}@{self.HOST_MONGODB}"
         )

     model_config = SettingsConfigDict(env_file='.env', env_file_encoding='utf-8')
+
+configs = Configs()
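For orientation, the new module-level configs = Configs() is the object that app.py and app/handlers.py import; its values come from the .env file named in model_config, and DB_URI is computed from the Mongo credentials. A rough consumption sketch (the .env values themselves are whatever you provide; they are not part of the diff):

# Sketch: reading the settings object elsewhere in the project, assuming a .env
# file that defines API_TOKEN_TG, HOST_MONGODB, USERNAME_MONGO, PASSWORD_MONGO,
# DATABASE_NAME and COLLECTION_NAME.
from app.config import configs

print(configs.DATABASE_NAME)
print(str(configs.DB_URI))  # mongodb+srv://<user>:<password>@<host>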

app/database/MongoDBConfig.py (new file; name inferred from the import in app/handlers.py)

@@ -0,0 +1,7 @@
from pydantic import BaseModel


class MongoDBConfig(BaseModel):
    url: str
    db_name: str = None
    collection: str = None
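db_name and collection are annotated as str but default to None; pydantic accepts that because defaults are not validated unless you ask it to, but Optional[...] states the intent and keeps type checkers quiet. A small variant, not part of the commit:

# Hedged variant of the committed model: same behaviour, explicit optionality.
from typing import Optional

from pydantic import BaseModel


class MongoDBConfig(BaseModel):
    url: str
    db_name: Optional[str] = None
    collection: Optional[str] = None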

a file under app/database defining the MongoDB wrapper (file name not shown in this view)

@@ -1,5 +1,19 @@
 from motor.motor_asyncio import AsyncIOMotorClient
+from app.database.MongoDBConfig import MongoDBConfig


 class MongoDB:
-    def __init__(self, url: str):
-        self.client = AsyncIOMotorClient(url)
+    def __init__(self, config: MongoDBConfig):
+        self.client = AsyncIOMotorClient(config.url)
+        self.db = self.get_db(config.db_name)
+        self.collection = self.get_collection(config.collection)
+
+    def get_db(self, db_name: str):
+        if db_name is None:
+            return None
+        return self.client[db_name]
+
+    def get_collection(self, collection_name: str):
+        if collection_name is None or self.db is None:
+            return None
+        return self.db[collection_name]
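The None checks make the database and collection optional, so the wrapper can also be built from just a connection string. A usage sketch under that reading; the URL and names below are placeholders, not values from the commit:

# Sketch: constructing the wrapper with and without a default collection.
from app.database import MongoDB
from app.database.MongoDBConfig import MongoDBConfig

full = MongoDB(MongoDBConfig(url="mongodb://localhost:27017",
                             db_name="sampleDB", collection="salaries"))
print(full.collection)             # an AsyncIOMotorCollection, ready to query

bare = MongoDB(MongoDBConfig(url="mongodb://localhost:27017"))
print(bare.db, bare.collection)    # None None, pick a db/collection later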

app/handlers.py (new file, 30 lines)

@@ -0,0 +1,30 @@
import json

from app.database import MongoDB, aggregate_salaries
from app.config import configs
from aiogram import Router, types
from aiogram.filters import Command
from app.texts import invalid, greetings
from app.query import Query
from app.database.MongoDBConfig import MongoDBConfig
from loguru import logger

router = Router()
client_config = MongoDBConfig(url=str(configs.DB_URI), db_name=configs.DATABASE_NAME, collection=configs.COLLECTION_NAME)
client = MongoDB(config=client_config)


@router.message(Command("start"))
async def start_handler(message: types.Message):
    await message.answer(greetings.format(name=message.from_user.first_name))


@router.message()
async def query_handler(message: types.Message):
    try:
        query = Query(**json.loads(message.text))
        logger.info("Aggregate query starting")
        result = await aggregate_salaries(client.collection, query.dt_from, query.dt_upto, query.group_type)
        logger.info("Aggregate query complete")
        await message.answer(str(result))
    except ValueError as e:
        await message.answer(invalid)
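For context, the message query_handler expects is the JSON example shown in app/texts.py below. json.JSONDecodeError subclasses ValueError, so malformed JSON falls into the same except branch as a failed Query(...) validation and gets the invalid reply (pydantic's ValidationError is caught here on versions where it derives from ValueError). The same parse step outside the bot, as a sketch:

# Sketch: the parse/validate step query_handler performs, run locally.
# The sample text is the example request from app/texts.py.
import json

from app.query import Query

text = '{"dt_from": "2022-09-01T00:00:00", "dt_upto": "2022-12-31T23:59:00", "group_type": "month"}'
query = Query(**json.loads(text))
print(query.dt_from, query.dt_upto, query.group_type)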

app/logger.py (new file, 52 lines)

@@ -0,0 +1,52 @@
import logging
import sys

from loguru import logger


class InterceptHandler(logging.Handler):
    LEVELS_MAP = {
        logging.CRITICAL: "CRITICAL",
        logging.ERROR: "ERROR",
        logging.WARNING: "WARNING",
        logging.INFO: "INFO",
        logging.DEBUG: "DEBUG",
    }

    def _get_level(self, record):
        return self.LEVELS_MAP.get(record.levelno, record.levelno)

    def emit(self, record):
        logger_opt = logger.opt(depth=6, exception=record.exc_info)
        logger_opt.log(self._get_level(record), record.getMessage())


def configure_logger(capture_exceptions: bool = False) -> None:
    logger.remove()
    level = "INFO"
    logger.add(
        "logs/log_{time:YYYY-MM-DD}.log",
        rotation="12:00",
        format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {file}:{line} | {message}",
        level="INFO",
        encoding="utf-8",
        compression="zip",
    )
    logger.add(
        sys.stdout,
        colorize=True,
        format="<green>{time:YYYY-MM-DD at HH:mm:ss}</green> | <level>{level}</level> | {file}:{line} | "
               "{message}",
        level=level,
    )
    if capture_exceptions:
        logger.add(
            "logs/error_log_{time:YYYY-MM-DD}.log",
            rotation="12:00",
            format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {file}:{line} | {message}",
            level="ERROR",
            encoding="utf-8",
            compression="zip",
        )
    logging.basicConfig(handlers=[InterceptHandler()], level=logging.INFO)
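What the last line buys: the loguru sinks receive the project's own logger calls, while logging.basicConfig(handlers=[InterceptHandler()]) reroutes records from libraries that use the standard logging module (aiogram, motor) into those same sinks. A minimal check, assuming the module above is importable:

# Sketch: stdlib logging records are forwarded into loguru by InterceptHandler.
import logging

from app.logger import configure_logger

configure_logger(capture_exceptions=True)
logging.getLogger("any.library").info("hello")  # lands in stdout and logs/log_*.log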

app/query.py (new file, 32 lines)

@@ -0,0 +1,32 @@
from datetime import datetime

from pydantic import BaseModel, validator


class Query(BaseModel):
    dt_from: str
    dt_upto: str
    group_type: str

    @validator('group_type', pre=True, always=True)
    def validate_group_type(cls, v):
        if v not in ['month', 'day', 'hour']:
            raise ValueError('Invalid group_type')
        return v

    @validator('dt_from', pre=True, always=True)
    def validate_dt_from(cls, v):
        try:
            datetime.strptime(v, '%Y-%m-%dT%H:%M:%S')
        except ValueError:
            raise ValueError('Invalid dt_from')
        return v

    @validator('dt_upto', pre=True, always=True)
    def validate_dt_upto(cls, v, values):
        try:
            datetime.strptime(v, '%Y-%m-%dT%H:%M:%S')
        except ValueError:
            raise ValueError('Invalid dt_upto')
        if values['dt_from'] > v:
            raise ValueError('dt_upto should be later than dt_from')
        return v
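Two things worth noting: the plain string comparison in validate_dt_upto is sound only because the fixed '%Y-%m-%dT%H:%M:%S' format sorts lexicographically in chronological order, and @validator is the pydantic v1-style API (kept as a deprecated shim in v2, where field_validator replaces it). A quick exercise of the model, as a sketch:

# Sketch: exercising the Query model directly.
from pydantic import ValidationError

from app.query import Query

Query(dt_from="2022-09-01T00:00:00", dt_upto="2022-12-31T23:59:00", group_type="month")  # valid

try:
    Query(dt_from="2022-12-31T23:59:00", dt_upto="2022-09-01T00:00:00", group_type="month")
except ValidationError as err:
    print(err)  # dt_upto should be later than dt_from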

app/texts.py (new file, 2 lines)

@@ -0,0 +1,2 @@
invalid = 'Невалидный запрос. Пример запроса: {"dt_from": "2022-09-01T00:00:00", "dt_upto": "2022-12-31T23:59:00", "group_type": "month"}'
greetings = 'Привет, {name}! Отправьте запрос в формате {{"dt_from": "2022-09-01T00:00:00", "dt_upto": "2022-12-31T23:59:00", "group_type": "month" || "day" || "hour"}}'
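In English, invalid reads "Invalid request. Example request: {...}" and greetings reads "Hi, {name}! Send a request in the format {...}"; the JSON example embedded in both strings is the same one shown above.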