Moved the logic into a service

parent fa45014591
commit 74626c7ead

14 changed files with 171 additions and 43 deletions

@@ -2,4 +2,5 @@
 venv
 .idea
 .gitignore
-.git
+.git
+local_pgdata

.gitignore (vendored), 1 change

@@ -160,5 +160,6 @@ cython_debug/
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 .idea/
 .test.env
+local_pgdata
 

Dockerfile

@@ -10,6 +10,5 @@ RUN pip install -r requirements.txt
 
 COPY . .
 
-RUN alembic upgrade head
+RUN chmod a+x /url_shortener/docker/start.sh
 
-CMD ["gunicorn", "app.main:app", "--workers", "1", "--worker-class", "uvicorn.workers.UvicornWorker", "--bind=0.0.0.0:8000"]

README.md

@@ -1,5 +1,7 @@
 # Link shortener
 
+[](https://github.com/astral-sh/ruff)
+
 This project provides an `API` for shortening links and for tracking visits to
 those shortened links.
 
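
The API surface touched by this commit can be exercised end to end. The sketch below is illustrative only and not part of the commit; it assumes the service is reachable at http://localhost:8000 (the port published in docker-compose.yml), that the shorten endpoint is mounted at POST "/" (its route decorator is outside this diff), and it uses httpx, which is already pinned in requirements.txt.

import asyncio

import httpx


async def demo():
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        # Shorten a URL; the response body follows the SNewUrl schema.
        created = await client.post("/", json={"url": "https://example.com/some/long/path"})
        short_url = created.json()["new_url"]

        # Follow the hash: the redirect endpoint answers with 307 and caches the target in Redis.
        url_hash = short_url.rstrip("/").rsplit("/", 1)[-1]
        redirect = await client.get(f"/{url_hash}", follow_redirects=False)
        print(redirect.status_code, redirect.headers["location"])

        # Visit statistics come back in the SGetStats shape.
        stats = await client.get(f"/stats/{url_hash}")
        print(stats.json())  # e.g. {"visit_times": 1, "timestamps": [...]}


asyncio.run(demo())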

app/dao.py

@@ -2,6 +2,7 @@ from sqlalchemy import select, insert
 
 from app.database import async_session_maker
 from app.models import URLs
+from app.shemas import SURLs
 
 
 class URLsDAO:

@@ -15,8 +16,10 @@ class URLsDAO:
         return result
 
     @staticmethod
-    async def find_by_hash(url_hash: int) -> URLs:
+    async def find_by_hash(url_hash: int) -> SURLs:
         async with async_session_maker() as session:
             query = select(URLs).filter_by(url_hash=url_hash)
             result = await session.execute(query)
-            return result.scalar_one_or_none()
+            result: URLs = result.scalar_one_or_none()
+            if result:
+                return SURLs(url_hash=result.url_hash, original_url=result.original_url)

app/main.py, 38 changes

@@ -1,13 +1,9 @@
-import time
-from urllib.parse import urljoin
-
-from fastapi import FastAPI, status, HTTPException, Depends
+from fastapi import FastAPI, status, Depends
 from fastapi.responses import RedirectResponse
 
-from app.config import settings
-from app.dao import URLsDAO
-from app.migrations.shemas import SURL, SNewUrl, SGetStats
+from app.shemas import SURL, SNewUrl, SGetStats
 from app.r import get_redis_client
+from app.service import get_visit_times, add_visit, get_original_url, add_url
 
 app = FastAPI(
     title="Сокращатель ссылок",

@@ -18,29 +14,15 @@ app = FastAPI(
 async def zip_url(url: SURL):
     url = str(url.url)
 
-    url_hash = await URLsDAO.add_new_url(original_url=url)
-
-    new_url = urljoin(settings.SHORTENER_HOST, str(url_hash))
-
-    return {"new_url": new_url}
+    return await add_url(url)
 
 
 @app.get("/{url_hash}", response_class=RedirectResponse, status_code=status.HTTP_307_TEMPORARY_REDIRECT)
 async def redirect(url_hash: int, r=Depends(get_redis_client)):
-    if r.exists(str(url_hash)):
-        url_bytes = r.get(str(url_hash))
-        url = url_bytes.decode('utf-8')
-    else:
-        urls = await URLsDAO.find_by_hash(url_hash)
-        if not urls:
-            raise HTTPException(status_code=404, detail="Этого адреса не существует")
-        url = urls.original_url
-        r.set(str(url_hash), url)
-        r.expire(str(url_hash), 60)
+    url = await get_original_url(url_hash, r)
 
     list_key = f"visit_times:{url_hash}"
-    timestamp = int(time.time())
-    r.lpush(list_key, timestamp)
+    await add_visit(list_key, r)
 
     return RedirectResponse(url=url)
 

@@ -48,8 +30,6 @@ async def redirect(url_hash: int, r=Depends(get_redis_client)):
 @app.get("/stats/{url_hash}", response_model=SGetStats)
 async def get_stats(url_hash: int, r=Depends(get_redis_client)):
     list_key = f"visit_times:{url_hash}"
-    if r.exists(list_key):
-        timestamps = r.lrange(list_key, 0, -1)
-        visit_times = len(timestamps)
-        return {"visit_times": visit_times, "timestamps": timestamps}
-    return {"visit_times": 0, "timestamps": []}
+
+    return await get_visit_times(list_key, r)
+

app/r.py, 2 changes

@@ -4,4 +4,4 @@ from app.config import settings
 
 
 def get_redis_client():
-    return redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB)
+    return redis.asyncio.client.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB)
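
With the client switched to redis.asyncio, every command returns a coroutine, which is why the calls in app/service.py and in the tests are now awaited. A minimal sketch of that behaviour, not part of the commit, assuming only a locally reachable Redis on the default port:

import asyncio

import redis.asyncio as redis


async def demo():
    r = redis.Redis(host="localhost", port=6379, db=0)
    await r.set("42", "https://example.com", ex=60)   # cache a value with a 60 s TTL
    cached = await r.get("42")                        # raw bytes, e.g. b"https://example.com"
    print(cached.decode("utf-8"))


asyncio.run(demo())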

app/service.py (new file), 48 lines

@@ -0,0 +1,48 @@
+import time
+from urllib.parse import urljoin
+
+import redis
+from fastapi import HTTPException
+
+from app.config import settings
+from app.dao import URLsDAO
+from app.shemas import SGetStats, SNewUrl
+
+
+async def get_visit_times(list_key: str, r: redis.Redis) -> SGetStats:
+    if await r.exists(list_key):
+        timestamps = await r.lrange(list_key, 0, -1)
+        visit_times = len(timestamps)
+    else:
+        visit_times = 0
+        timestamps = []
+
+    return SGetStats(visit_times=visit_times, timestamps=timestamps)
+
+
+async def add_visit(list_key: str, r: redis.Redis) -> None:
+    timestamp = int(time.time())
+    await r.lpush(list_key, timestamp)
+
+
+async def get_original_url(url_hash: int, r: redis.Redis) -> str:
+    if await r.exists(str(url_hash)):
+        url_bytes = await r.get(str(url_hash))
+        url = url_bytes.decode('utf-8')
+    else:
+        urls = await URLsDAO.find_by_hash(url_hash)
+        if not urls:
+            raise HTTPException(status_code=404, detail="Этого адреса не существует")
+        url = urls.original_url
+        await r.set(str(url_hash), url)
+        await r.expire(str(url_hash), 60)
+
+    return url
+
+
+async def add_url(url: str) -> SNewUrl:
+    url_hash = await URLsDAO.add_new_url(original_url=url)
+
+    new_url = urljoin(settings.SHORTENER_HOST, str(url_hash))
+
+    return SNewUrl(new_url=new_url)
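
Because the endpoint logic now lives in plain functions that receive the Redis client as an argument, it can be exercised without FastAPI or a live Redis. A hypothetical sketch (the StubRedis class below is an illustration, not part of the project, and it assumes the app.config settings can be loaded):

import asyncio

from app.service import get_visit_times


class StubRedis:
    """Implements only the two awaitable calls that get_visit_times relies on."""

    def __init__(self, data):
        self.data = data

    async def exists(self, key):
        return key in self.data

    async def lrange(self, key, start, end):
        return self.data[key]


async def demo():
    stats = await get_visit_times("visit_times:42", StubRedis({"visit_times:42": [1712000000]}))
    print(stats)  # SGetStats with visit_times=1, timestamps=[1712000000]


asyncio.run(demo())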

app/shemas.py

@@ -12,3 +12,8 @@ class SNewUrl(BaseModel):
 class SGetStats(BaseModel):
     visit_times: int
     timestamps: list[int]
+
+
+class SURLs(BaseModel):
+    url_hash: int
+    original_url: str

@@ -1,12 +1,9 @@
-import json
-
 import pytest
-from sqlalchemy import insert
 from httpx import AsyncClient
 
 from app.config import settings
-from app.database import Base, async_session_maker, engine
-from app.models import URLs
+from app.database import Base, engine
+from app.models import URLs  # noqa
 from app.main import app as fastapi_app
 from app.r import get_redis_client
 

@@ -25,7 +22,7 @@ async def prepare_redis():
     assert settings.MODE == "TEST"
 
     r = get_redis_client()
-    r.flushdb()
+    await r.flushdb()
 
 
 @pytest.fixture(scope="function")

docker-compose.yml

@@ -6,7 +6,30 @@ services:
       context: .
     container_name: url_shortener
     ports:
-      - 8000:8000
-
+      - "8000:8000"
+    depends_on:
+      - redis
+      - postgres
+    command: ["/url_shortener/docker/start.sh"]
+
   redis:
     image: redis:latest
+    container_name: redis
+    ports:
+      - "6379:6379"
+
+  postgres:
+    image: postgres
+    container_name: postgres
+    environment:
+      POSTGRES_USER: root
+      POSTGRES_PASSWORD: root
+      POSTGRES_DB: test_db
+    ports:
+      - "5432:5432"
+    volumes:
+      - local_pgdata:/var/lib/postgresql/data
+
+volumes:
+  local_pgdata:
+

docker/start.sh (new file), 5 lines

@@ -0,0 +1,5 @@
+#!/usr/bin/bash
+
+alembic upgrade head
+
+gunicorn app.main:app --workers 1 --worker-class uvicorn.workers.UvicornWorker --bind=0.0.0.0:8000

requirements.txt

@@ -11,6 +11,7 @@ fastapi==0.110.1
 greenlet==3.0.3
 gunicorn==21.2.0
 h11==0.14.0
+hiredis==2.3.2
 httpcore==1.0.5
 httptools==0.6.1
 httpx==0.27.0

@@ -34,6 +35,7 @@ python-dotenv==1.0.1
 python-multipart==0.0.9
 PyYAML==6.0.1
 redis==5.0.3
+ruff==0.3.5
 sniffio==1.3.1
 SQLAlchemy==2.0.29
 starlette==0.37.2

ruff.toml (new file), 62 lines

@@ -0,0 +1,62 @@
+# Exclude a variety of commonly ignored directories.
+exclude = [
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".ipynb_checkpoints",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pyenv",
+    ".pytest_cache",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    ".vscode",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "site-packages",
+    "venv",
+    ".idea",
+]
+
+# Same as Black.
+line-length = 88
+indent-width = 4
+
+# Assume Python 3.8
+target-version = "py311"
+
+[lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+select = ["E4", "E7", "E9", "F"]
+ignore = []
+
+# Allow fix for all enabled rules (when `--fix`) is provided.
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[format]
+# Like Black, use double quotes for strings.
+quote-style = "double"
+
+# Like Black, indent with spaces, rather than tabs.
+indent-style = "space"
+
+# Like Black, respect magic trailing commas.
+skip-magic-trailing-comma = false
+
+# Like Black, automatically detect the appropriate line ending.
+line-ending = "auto"