Compare commits
191 Commits
bc12448d03
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| c8af9ffa4d | |||
| ed00d1483d | |||
| c71a460170 | |||
| cb0168a199 | |||
| 678dd18e12 | |||
| 52860f9a14 | |||
| c9e07b8368 | |||
| bd83f654c5 | |||
| cc45da84d1 | |||
| e8c776ac66 | |||
| 03dd3f9db4 | |||
| 43932a93e0 | |||
| d10fa2f08a | |||
| 85492a31e8 | |||
| c492e9a57e | |||
| d96a3b0512 | |||
| 0caf1f3f42 | |||
| bd7d9a19d3 | |||
| 4607c31ccf | |||
| d4d25b4df5 | |||
| 2bec0c579f | |||
| bdfa885cba | |||
| 816dcac16c | |||
| 329616bcd3 | |||
| 4f08019d85 | |||
| 74faa57805 | |||
| be13c77164 | |||
| 42ce73dd6a | |||
| 36ddf7d6a7 | |||
| cd374569dd | |||
| 4303f679b5 | |||
| 68f94cc9a4 | |||
| c08c2c04c4 | |||
| 10683a9a42 | |||
| 21794da3d2 | |||
| a8ed0976fe | |||
| b5c8e35910 | |||
| f083c19cdc | |||
| e6674bd576 | |||
| 177f3a3db3 | |||
| 95f0c26498 | |||
| 978a684278 | |||
| 2730ced37e | |||
| 2a8833d7a9 | |||
| ba2a95f22e | |||
| 00c112b20d | |||
| d27390426c | |||
| 498ab093f3 | |||
| a81e692957 | |||
| 00522da68f | |||
| 2cdccb33ca | |||
| 06507975ed | |||
| b5d195998c | |||
| fbecb61885 | |||
| e24e194199 | |||
| b979a366c6 | |||
| ecb8d88b4f | |||
| a466e46f28 | |||
| d4b6720188 | |||
| 0e6563f54d | |||
| 069bab1c01 | |||
| 15d63e35f4 | |||
| 14cc9ec303 | |||
| 30e7bafc88 | |||
| 6030591e3c | |||
| 3d9a3cef73 | |||
| 6c2698679d | |||
| 487174c4ff | |||
| c75ff66f27 | |||
| 1e7c3fb9fc | |||
| f8d0f430ac | |||
| 6fb9dcf6f0 | |||
| a2c9fd8e3b | |||
| c6c006d45b | |||
| 1af78ce08a | |||
| a509a3a586 | |||
| 5b9e75dddf | |||
| 9ee3f87de9 | |||
| 2aa84837e4 | |||
| 7e160c57aa | |||
| d44c9bd1b9 | |||
| 78c14b4252 | |||
| d705733d89 | |||
| 8857da1063 | |||
| 7e301ea5ce | |||
| de4c2677aa | |||
| 2c4037c1b5 | |||
| defe31b55e | |||
| b8947ce68e | |||
| 43a95ef75c | |||
| a4d59b740d | |||
| 6d3991054b | |||
| d609c10edb | |||
| 1f26f94d96 | |||
| e65ca07acc | |||
| 2abc198a39 | |||
| 3234eb7bea | |||
| e8af9f2237 | |||
| c65ca39d08 | |||
| a791f7edf8 | |||
| 144811ec87 | |||
| e5712224e1 | |||
| daa34b2808 | |||
| 863dd226c3 | |||
| d56e292276 | |||
| 9a6fc3fdd7 | |||
| 2bd2486653 | |||
| 0f885804e9 | |||
| d8b2e43add | |||
| bf4fb2b3af | |||
| f66e960f09 | |||
| 96998a4c05 | |||
| 41c54d7bb7 | |||
| c913336ade | |||
| 88ce256850 | |||
| 48c88cb93a | |||
| 81d5261af1 | |||
| 0adc36827a | |||
| 42a193dae7 | |||
| 8652d31f6d | |||
| e36fad70c5 | |||
| 8733e6d94b | |||
| c9be4f098e | |||
| 0c5eca6cc4 | |||
| e873325c34 | |||
| 9707faf60c | |||
| 915206d969 | |||
| 3dbbae2173 | |||
| 29211dd442 | |||
| 0dd3c50be8 | |||
| e2d35fb7c4 | |||
| 3453c394e5 | |||
| bbe9832923 | |||
| 1dde0089b9 | |||
| 58814def8c | |||
| 1e260f2e42 | |||
| b98e98cabd | |||
| eff71b3570 | |||
| 915a1d5f28 | |||
| 30ec0528a4 | |||
| 6890d6b79c | |||
| b1034d437e | |||
| ab31c7c195 | |||
| 25e6cf0e7e | |||
| 151c7767b1 | |||
| fced9b8101 | |||
| 4f9d55d2c6 | |||
| bd499fc602 | |||
| 25060322a1 | |||
| 5ed42d99dc | |||
| 67b500761c | |||
| cbe3697f1b | |||
| 750913d3a6 | |||
| fa33313068 | |||
| 9a12ddb6af | |||
| 4fa5c0d05b | |||
| e4d9b5ef16 | |||
| 80b6dc3aec | |||
| cc6cae2b59 | |||
| 0407177085 | |||
| ab5fc94e1b | |||
| 3db54fdb13 | |||
| 8075ce04b0 | |||
| c5f7ce651f | |||
| 1eb85279b5 | |||
| eafd4c94f5 | |||
| 4167640bd8 | |||
| 4994f2c026 | |||
| 7ee4ce36ce | |||
| 9a2ec02ac4 | |||
| aae7e96001 | |||
| e153d4a1c0 | |||
| 4e5365b946 | |||
| 0a34addae1 | |||
| 64dd40a072 | |||
| ca6e7d5b37 | |||
| 766ded04a5 | |||
| 8bf956201a | |||
| db71481da8 | |||
| 590042521d | |||
| 61d379e7dc | |||
| 413d8755cc | |||
| 489f65a087 | |||
| d9c43624c0 | |||
| 6f2c4b0672 | |||
| 91cf44f3ae | |||
| 5df64d4916 | |||
| 0e54b0edd3 | |||
| 0b1395f86d | |||
| 52681f3f61 | |||
| 973351a3af |
1
.dockerignore
Normal file
1
.dockerignore
Normal file
@@ -0,0 +1 @@
|
||||
.venv
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -1,8 +1,11 @@
|
||||
/migrations/versions/*
|
||||
/migrations_wms/versions/*
|
||||
.env
|
||||
.venv
|
||||
.idea
|
||||
__pycache__
|
||||
/venv
|
||||
/test
|
||||
/test/*
|
||||
/test/*
|
||||
certs
|
||||
.vscode
|
||||
|
||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.11
|
||||
33
Dockerfile
Normal file
33
Dockerfile
Normal file
@@ -0,0 +1,33 @@
|
||||
FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim AS builder
|
||||
ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy
|
||||
|
||||
# Disable Python downloads, because we want to use the system interpreter
|
||||
# across both images. If using a managed Python version, it needs to be
|
||||
# copied from the build image into the final image; see `standalone.Dockerfile`
|
||||
# for an example.
|
||||
ENV UV_PYTHON_DOWNLOADS=0
|
||||
|
||||
# Install git
|
||||
|
||||
WORKDIR /app
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
--mount=type=bind,source=uv.lock,target=uv.lock \
|
||||
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||
uv sync --locked --no-install-project --no-dev
|
||||
COPY . /app
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
uv sync --locked --no-dev
|
||||
|
||||
# Then, use a final image without uv
|
||||
FROM python:3.11-slim-bookworm
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y weasyprint && \
|
||||
apt clean && \
|
||||
rm -rf /var/cache/apt/*
|
||||
|
||||
|
||||
# Copy the application from the builder
|
||||
COPY --from=builder --chown=app:app /app /app
|
||||
ENV PATH="/app/.venv/bin:$PATH"
|
||||
WORKDIR /app
|
||||
114
alembic.wms.ini
Normal file
114
alembic.wms.ini
Normal file
@@ -0,0 +1,114 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations_wms
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python>=3.9 or backports.zoneinfo library.
|
||||
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to ZoneInfo()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to migrations/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
|
||||
|
||||
# version path separator; As mentioned above, this is the character used to split
|
||||
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||
# Valid values for version_path_separator are:
|
||||
#
|
||||
# version_path_separator = :
|
||||
# version_path_separator = ;
|
||||
# version_path_separator = space
|
||||
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
sqlalchemy.url = postgresql://{PG_LOGIN}:{PG_PASSWORD}@{PG_HOST}/{PG_DATABASE}
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
|
||||
# hooks = ruff
|
||||
# ruff.type = exec
|
||||
# ruff.executable = %(here)s/.venv/bin/ruff
|
||||
# ruff.options = --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
BIN
assets/fonts/Arial Nova Cond.ttf
Normal file
BIN
assets/fonts/Arial Nova Cond.ttf
Normal file
Binary file not shown.
33
auth/jwt.py
33
auth/jwt.py
@@ -1,33 +0,0 @@
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import HTTPException, Depends
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
from sqlalchemy import select
|
||||
from starlette import status
|
||||
|
||||
from backend import config
|
||||
from database import User
|
||||
from jose import jwt
|
||||
|
||||
from database.base import DatabaseDependency
|
||||
|
||||
oauth2_scheme = OAuth2PasswordBearer("")
|
||||
ALGORITHM = "HS256"
|
||||
|
||||
|
||||
def generate_jwt_token(user: User):
|
||||
return jwt.encode({'sub': user.id}, settings.SECRET_KEY, algorithm=ALGORITHM)
|
||||
|
||||
|
||||
def require_jwt_sub(token: Annotated[str, Depends(oauth2_scheme)]):
|
||||
payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[ALGORITHM])
|
||||
user_id = payload.get("sub")
|
||||
if not user_id:
|
||||
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail='Invalid authorization credentials')
|
||||
return payload
|
||||
|
||||
|
||||
async def get_current_user(db_session: DatabaseDependency, user_id: Annotated[int, Depends(require_jwt_sub)]) -> User:
|
||||
user = await db_session.scalar(select(User).where(User.id == user_id))
|
||||
if user:
|
||||
return user
|
||||
@@ -1,27 +0,0 @@
|
||||
import hmac
|
||||
import hashlib
|
||||
|
||||
from backend import config
|
||||
|
||||
|
||||
def _generate_hash(telegram_data: dict):
|
||||
data = telegram_data.copy()
|
||||
del data['hash']
|
||||
keys = sorted(data.keys())
|
||||
string_arr = []
|
||||
for key in keys:
|
||||
if data[key] is not None:
|
||||
string_arr.append(key + '=' + str(data[key]))
|
||||
string_cat = '\n'.join(string_arr)
|
||||
|
||||
secret_key = hashlib.sha256(settings.TELEGRAM_BOT_TOKEN.encode('utf-8')).digest()
|
||||
hash_bytes = bytes(string_cat, 'utf-8')
|
||||
hmac_hash = hmac.new(secret_key, hash_bytes, hashlib.sha256).hexdigest()
|
||||
return hmac_hash
|
||||
|
||||
|
||||
def telegram_authorize(telegram_data: dict):
|
||||
generated_hash = _generate_hash(telegram_data)
|
||||
user_hash = telegram_data['hash']
|
||||
return generated_hash == user_hash
|
||||
|
||||
@@ -4,22 +4,41 @@ from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
# Database
|
||||
# FF database
|
||||
PG_LOGIN = os.environ.get('PG_LOGIN')
|
||||
PG_PASSWORD = os.environ.get('PG_PASSWORD')
|
||||
PG_PORT = os.environ.get('PG_PORT')
|
||||
PG_DATABASE = os.environ.get('PG_DATABASE')
|
||||
PG_HOST = os.environ.get('PG_HOST')
|
||||
|
||||
# WMS database
|
||||
WMS_PG_LOGIN = os.environ.get('WMS_PG_LOGIN')
|
||||
WMS_PG_PASSWORD = os.environ.get('WMS_PG_PASSWORD')
|
||||
WMS_PG_PORT = os.environ.get('WMS_PG_PORT')
|
||||
WMS_PG_DATABASE = os.environ.get('WMS_PG_DATABASE')
|
||||
WMS_PG_HOST = os.environ.get('WMS_PG_HOST')
|
||||
|
||||
# Telegram
|
||||
TELEGRAM_BOT_TOKEN = os.environ.get('TELEGRAM_BOT_TOKEN')
|
||||
|
||||
SECRET_KEY = os.environ.get('SECRET_KEY')
|
||||
S3_API_KEY = os.environ.get('S3_API_KEY')
|
||||
|
||||
# Billing
|
||||
BILLING_API_KEY = os.environ.get('BILLING_API_KEY')
|
||||
BILLING_URL = os.environ.get('BILLING_URL')
|
||||
|
||||
CHAT_TELEGRAM_BOT_TOKEN = os.environ.get('CHAT_TELEGRAM_BOT_TOKEN')
|
||||
CHAT_CONNECTOR_API_KEY = os.environ.get('CHAT_CONNECTOR_API_KEY')
|
||||
CHATS_SYNC_URL = os.environ.get('CHATS_SYNC_URL')
|
||||
|
||||
# Kafka
|
||||
KAFKA_URL = os.environ.get('KAFKA_URL')
|
||||
KAFKA_CONSUMER_TOPIC = os.environ.get('KAFKA_CONSUMER_TOPIC')
|
||||
KAFKA_PRODUCER_TOPIC = os.environ.get('KAFKA_PRODUCER_TOPIC')
|
||||
KAFKA_ENABLE_SSL = os.environ.get('KAFKA_ENABLE_SSL', 'true').lower() in ('true', '1', 't')
|
||||
|
||||
# Celery
|
||||
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL')
|
||||
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND')
|
||||
|
||||
WB_SECRET_TOKEN = os.environ.get("WB_SECRET_TOKEN")
|
||||
|
||||
@@ -4,13 +4,16 @@ from fastapi import Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from backend.session import get_session
|
||||
from backend.wms_session import get_wms_session
|
||||
from models import User
|
||||
from schemas.auth import UserUnion
|
||||
from schemas.base import PaginationSchema
|
||||
from services.auth import get_current_user, authorized_user, guest_user
|
||||
from utils.dependecies import pagination_parameters
|
||||
|
||||
SessionDependency = Annotated[AsyncSession, Depends(get_session)]
|
||||
WmsSessionDependency = Annotated[AsyncSession, Depends(get_wms_session)]
|
||||
PaginationDependency = Annotated[PaginationSchema, Depends(pagination_parameters)]
|
||||
CurrentUserDependency = Annotated[User, Depends(get_current_user)]
|
||||
CurrentUserDependency = Annotated[UserUnion, Depends(get_current_user)]
|
||||
AuthorizedUserDependency = Annotated[User, Depends(authorized_user)]
|
||||
GuestUserDependency = Annotated[User, Depends(guest_user)]
|
||||
|
||||
21
backend/wms_session.py
Normal file
21
backend/wms_session.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from .config import WMS_PG_DATABASE, WMS_PG_HOST, WMS_PG_PASSWORD, WMS_PG_LOGIN
|
||||
|
||||
DATABASE_URL = f'postgresql+asyncpg://{WMS_PG_LOGIN}:{WMS_PG_PASSWORD}@{WMS_PG_HOST}/{WMS_PG_DATABASE}'
|
||||
engine = create_async_engine(DATABASE_URL)
|
||||
wms_session_maker = sessionmaker(
|
||||
engine,
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
autocommit=False,
|
||||
autoflush=False,
|
||||
)
|
||||
|
||||
|
||||
async def get_wms_session() -> AsyncGenerator[AsyncSession, None]:
|
||||
async with wms_session_maker() as session:
|
||||
yield session
|
||||
@@ -4,4 +4,6 @@ from models import Product
|
||||
|
||||
class SizeAttributeGetter(BaseAttributeGetter):
|
||||
def get_value(self, product: Product):
|
||||
return product.size
|
||||
if not product.size:
|
||||
return None
|
||||
return f'<font size="8" name="Helvetica-Bold"><b>{product.size}</b></font>'
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
from abc import abstractmethod
|
||||
from typing import List, Dict
|
||||
|
||||
from models import ProductBarcode, Product, BarcodeTemplate
|
||||
from models import BarcodeTemplate, Product
|
||||
|
||||
|
||||
class BaseBarcodeGenerator:
|
||||
|
||||
@abstractmethod
|
||||
def generate(self,
|
||||
barcode: ProductBarcode,
|
||||
product: Product,
|
||||
template: BarcodeTemplate):
|
||||
def generate(self, data: List[Dict[str, str | Product | BarcodeTemplate | int]]):
|
||||
pass
|
||||
|
||||
@@ -1,26 +1,44 @@
|
||||
from io import BytesIO
|
||||
from typing import List
|
||||
|
||||
from barcodes.attributes import AttributeWriterFactory
|
||||
from barcodes.generator.base import BaseBarcodeGenerator
|
||||
from barcodes.pdf import PDFGenerator
|
||||
from models import ProductBarcode, Product, BarcodeTemplate
|
||||
from barcodes.types import BarcodeData, PdfBarcodeGenData, PdfBarcodeImageGenData
|
||||
|
||||
|
||||
class DefaultBarcodeGenerator(BaseBarcodeGenerator):
|
||||
def generate(self,
|
||||
barcode: str,
|
||||
product: Product,
|
||||
template: BarcodeTemplate,
|
||||
quantity: int = 1):
|
||||
def generate(self, barcodes_data: List[BarcodeData | PdfBarcodeImageGenData]) -> BytesIO:
|
||||
pdf_generator = PDFGenerator()
|
||||
attributes = {}
|
||||
for attribute in template.attributes:
|
||||
attribute_getter = AttributeWriterFactory.get_writer(attribute.key)
|
||||
if not attribute_getter:
|
||||
continue
|
||||
value = attribute_getter.get_value(product)
|
||||
if not value or not value.strip():
|
||||
continue
|
||||
attributes[attribute.name] = value
|
||||
for additional_attribute in template.additional_attributes:
|
||||
attributes[additional_attribute.name] = additional_attribute.value
|
||||
barcode_text = '<br/>'.join([f'{key}: {value}' for key, value in attributes.items()])
|
||||
return pdf_generator.generate(barcode, barcode_text, num_duplicates=quantity)
|
||||
|
||||
pdf_barcodes_gen_data: List[PdfBarcodeGenData | PdfBarcodeImageGenData] = []
|
||||
|
||||
for barcode_data in barcodes_data:
|
||||
if "barcode" in barcode_data:
|
||||
attributes = {}
|
||||
for attribute in barcode_data["template"].attributes:
|
||||
attribute_getter = AttributeWriterFactory.get_writer(attribute.key)
|
||||
if not attribute_getter:
|
||||
continue
|
||||
value = attribute_getter.get_value(barcode_data["product"])
|
||||
|
||||
if not value or not value.strip():
|
||||
continue
|
||||
attributes[attribute.name] = value
|
||||
for additional_attribute in barcode_data["template"].additional_attributes:
|
||||
value = additional_attribute.value
|
||||
if not value:
|
||||
continue
|
||||
attributes[additional_attribute.name] = value
|
||||
barcode_text = '<br/>'.join([f'{key}: {value}' for key, value in attributes.items()])
|
||||
|
||||
pdf_barcodes_gen_data.append({
|
||||
"barcode_value": barcode_data["barcode"],
|
||||
"text": barcode_text,
|
||||
"num_duplicates": barcode_data["num_duplicates"]
|
||||
})
|
||||
else:
|
||||
pdf_barcodes_gen_data.append(barcode_data)
|
||||
|
||||
return pdf_generator.generate(pdf_barcodes_gen_data)
|
||||
|
||||
|
||||
1
barcodes/images_uploader/__init__.py
Normal file
1
barcodes/images_uploader/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .images_uploader import BarcodeImagesUploader
|
||||
21
barcodes/images_uploader/base.py
Normal file
21
barcodes/images_uploader/base.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from abc import abstractmethod
|
||||
from typing import BinaryIO
|
||||
|
||||
|
||||
class BaseImagesUploader:
|
||||
|
||||
@abstractmethod
|
||||
def get_url(self, filename: str) -> bytes:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_abs_path(self, filename: str) -> bytes:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def delete(self, filename: str):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def upload(self, file: BinaryIO, filename: str) -> str:
|
||||
pass
|
||||
37
barcodes/images_uploader/images_uploader.py
Normal file
37
barcodes/images_uploader/images_uploader.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from pathlib import Path
|
||||
from typing import BinaryIO
|
||||
from uuid import uuid4
|
||||
|
||||
from aioshutil import copyfileobj
|
||||
|
||||
from barcodes.images_uploader.base import BaseImagesUploader
|
||||
from constants import APP_PATH, API_ROOT
|
||||
|
||||
|
||||
class BarcodeImagesUploader(BaseImagesUploader):
|
||||
def __init__(self):
|
||||
self.relative_path = Path("static/images/product_barcodes")
|
||||
self.storage_path = APP_PATH / self.relative_path
|
||||
if not Path.exists(self.storage_path):
|
||||
Path.mkdir(self.storage_path)
|
||||
|
||||
def get_url(self, filename: str) -> str:
|
||||
file_location = self.relative_path / filename
|
||||
return f"{API_ROOT}/{file_location}"
|
||||
|
||||
def get_abs_path(self, filename: str) -> str:
|
||||
file_location = self.storage_path / filename
|
||||
return file_location
|
||||
|
||||
def delete(self, filename: str):
|
||||
file_location = self.storage_path / filename
|
||||
if file_location.exists():
|
||||
file_location.unlink()
|
||||
|
||||
async def upload(self, file: BinaryIO, filename: str) -> str:
|
||||
filename = str(uuid4()) + '.' + filename.split('.')[-1]
|
||||
file_location = self.storage_path / filename
|
||||
with open(file_location, 'wb') as buffer:
|
||||
await copyfileobj(file, buffer)
|
||||
|
||||
return filename
|
||||
@@ -1,12 +1,17 @@
|
||||
import os
|
||||
from io import BytesIO
|
||||
from typing import List
|
||||
|
||||
from reportlab.graphics.barcode import code128
|
||||
from reportlab.lib.pagesizes import mm
|
||||
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
|
||||
from reportlab.pdfbase import pdfmetrics
|
||||
from reportlab.pdfbase.ttfonts import TTFont
|
||||
from reportlab.pdfgen import canvas
|
||||
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, PageBreak
|
||||
|
||||
from barcodes.pdf.pdf_maker import PdfMaker
|
||||
from barcodes.types import PdfBarcodeImageGenData, PdfBarcodeGenData
|
||||
from constants import APP_PATH
|
||||
|
||||
|
||||
@@ -17,6 +22,7 @@ class PDFGenerator:
|
||||
FONT_FILE_PATH = os.path.join(FONTS_FOLDER, 'DejaVuSans.ttf')
|
||||
self.page_width = 58 * mm
|
||||
self.page_height = 40 * mm
|
||||
self.number_of_spacing_pages = 2
|
||||
|
||||
pdfmetrics.registerFont(TTFont('DejaVuSans', FONT_FILE_PATH))
|
||||
|
||||
@@ -30,19 +36,21 @@ class PDFGenerator:
|
||||
leading=7,
|
||||
spaceAfter=2,
|
||||
leftIndent=2,
|
||||
rightIndent=2
|
||||
rightIndent=2,
|
||||
)
|
||||
|
||||
def generate_small_text(self, barcode_value, text, num_duplicates=1):
|
||||
buffer = BytesIO()
|
||||
|
||||
# Create document with specified page size
|
||||
doc = SimpleDocTemplate(buffer,
|
||||
pagesize=(self.page_width, self.page_height),
|
||||
rightMargin=1 * mm,
|
||||
leftMargin=1 * mm,
|
||||
topMargin=1 * mm,
|
||||
bottomMargin=1 * mm)
|
||||
doc = SimpleDocTemplate(
|
||||
buffer,
|
||||
pagesize=(self.page_width, self.page_height),
|
||||
rightMargin=1 * mm,
|
||||
leftMargin=1 * mm,
|
||||
topMargin=1 * mm,
|
||||
bottomMargin=1 * mm
|
||||
)
|
||||
|
||||
# Create paragraph with new style
|
||||
paragraph = Paragraph(text, self.small_style)
|
||||
@@ -58,10 +66,12 @@ class PDFGenerator:
|
||||
|
||||
# Calculate barWidth
|
||||
bar_width = available_width / num_elements
|
||||
barcode = code128.Code128(barcode_value,
|
||||
barWidth=bar_width,
|
||||
barHeight=barcode_height,
|
||||
humanReadable=True)
|
||||
barcode = code128.Code128(
|
||||
barcode_value,
|
||||
barWidth=bar_width,
|
||||
barHeight=barcode_height,
|
||||
humanReadable=True
|
||||
)
|
||||
|
||||
# Function to draw barcode on canvas
|
||||
def add_barcode(canvas, doc):
|
||||
@@ -82,19 +92,23 @@ class PDFGenerator:
|
||||
buffer.seek(0)
|
||||
return buffer
|
||||
|
||||
def generate(self, barcode_value, text, num_duplicates=1):
|
||||
buffer = BytesIO()
|
||||
# Создание документа с указанным размером страницы
|
||||
def _create_doc(self, buffer):
|
||||
return SimpleDocTemplate(
|
||||
buffer,
|
||||
pagesize=(self.page_width, self.page_height),
|
||||
rightMargin=1 * mm,
|
||||
leftMargin=1 * mm,
|
||||
topMargin=1 * mm,
|
||||
bottomMargin=1 * mm
|
||||
)
|
||||
|
||||
# Создаем документ с указанным размером страницы
|
||||
doc = SimpleDocTemplate(buffer,
|
||||
pagesize=(self.page_width, self.page_height),
|
||||
rightMargin=1 * mm,
|
||||
leftMargin=1 * mm,
|
||||
topMargin=1 * mm,
|
||||
bottomMargin=1 * mm)
|
||||
def _generate_for_one_product(self, barcode_data: PdfBarcodeGenData) -> BytesIO:
|
||||
buffer = BytesIO()
|
||||
doc = self._create_doc(buffer)
|
||||
|
||||
# Создаем абзац с новым стилем
|
||||
paragraph = Paragraph(text, self.small_style)
|
||||
paragraph = Paragraph(barcode_data['text'], self.small_style)
|
||||
|
||||
# Получаем ширину и высоту абзаца
|
||||
paragraph_width, paragraph_height = paragraph.wrap(self.page_width - 2 * mm, self.page_height)
|
||||
@@ -108,14 +122,23 @@ class PDFGenerator:
|
||||
available_width = self.page_width - 2 * mm # Учитываем поля
|
||||
|
||||
# Приблизительное количество элементов в штрихкоде Code 128 для средней длины
|
||||
num_elements = 11 * len(barcode_value) # Примерная оценка: 11 элементов на символ
|
||||
num_elements = 11 * len(barcode_data['barcode_value']) # Примерная оценка: 11 элементов на символ
|
||||
|
||||
# Рассчитываем ширину штриха
|
||||
bar_width = available_width / num_elements
|
||||
barcode = code128.Code128(barcode_value,
|
||||
barWidth=bar_width,
|
||||
barHeight=barcode_height,
|
||||
humanReadable=True)
|
||||
barcode = code128.Code128(
|
||||
barcode_data['barcode_value'],
|
||||
barWidth=bar_width,
|
||||
barHeight=barcode_height,
|
||||
humanReadable=True
|
||||
)
|
||||
|
||||
# Добавление штрихкодов в список элементов документа
|
||||
elements = []
|
||||
for _ in range(barcode_data['num_duplicates']):
|
||||
elements.append(paragraph)
|
||||
elements.append(Spacer(1, space_between_text_and_barcode)) # Отступ между текстом и штрихкодом
|
||||
elements.append(PageBreak())
|
||||
|
||||
# Функция для отрисовки штрихкода на canvas
|
||||
def add_barcode(canvas, doc):
|
||||
@@ -124,16 +147,45 @@ class PDFGenerator:
|
||||
barcode_y = human_readable_height + 2 * mm # Размещаем штрихкод снизу с учетом отступа
|
||||
barcode.drawOn(canvas, barcode_x, barcode_y)
|
||||
|
||||
# Создаем список элементов для добавления в документ
|
||||
elements = []
|
||||
for _ in range(num_duplicates):
|
||||
elements.append(paragraph)
|
||||
elements.append(Spacer(1, space_between_text_and_barcode)) # Отступ между текстом и штрихкодом
|
||||
elements.append(PageBreak())
|
||||
|
||||
# Создаем документ
|
||||
doc.build(elements[:-1], onFirstPage=add_barcode, onLaterPages=add_barcode) # Убираем последний PageBreak
|
||||
doc.build(elements, onFirstPage=add_barcode, onLaterPages=add_barcode) # Убираем последний PageBreak
|
||||
|
||||
buffer.seek(0)
|
||||
return buffer
|
||||
|
||||
def _generate_for_one_product_using_img(self, barcode_data: PdfBarcodeImageGenData) -> BytesIO:
|
||||
with open(barcode_data["barcode_image_url"], 'rb') as pdf_file:
|
||||
pdf_bytes = pdf_file.read()
|
||||
|
||||
pdf_maker = PdfMaker((self.page_width, self.page_height))
|
||||
for _ in range(barcode_data['num_duplicates']):
|
||||
pdf_maker.add_pdfs(BytesIO(pdf_bytes))
|
||||
|
||||
return pdf_maker.get_bytes()
|
||||
|
||||
def _generate_spacers(self) -> BytesIO:
|
||||
buffer = BytesIO()
|
||||
doc = self._create_doc(buffer)
|
||||
elements = []
|
||||
for _ in range(self.number_of_spacing_pages):
|
||||
elements.append(PageBreak())
|
||||
doc.build(elements)
|
||||
buffer.seek(0)
|
||||
return buffer
|
||||
|
||||
def generate(self, barcodes_data: List[PdfBarcodeGenData | PdfBarcodeImageGenData]) -> BytesIO:
|
||||
pdf_maker = PdfMaker((self.page_width, self.page_height))
|
||||
|
||||
pdf_files: list[BytesIO] = []
|
||||
|
||||
for barcode_data in barcodes_data:
|
||||
if "barcode_value" in barcode_data:
|
||||
pdf_files.append(self._generate_for_one_product(barcode_data))
|
||||
else:
|
||||
pdf_files.append(self._generate_for_one_product_using_img(barcode_data))
|
||||
pdf_files.append(self._generate_spacers())
|
||||
|
||||
for file in pdf_files[:-1]:
|
||||
pdf_maker.add_pdfs(file)
|
||||
|
||||
return pdf_maker.get_bytes()
|
||||
|
||||
83
barcodes/pdf/pdf_maker.py
Normal file
83
barcodes/pdf/pdf_maker.py
Normal file
@@ -0,0 +1,83 @@
|
||||
from io import BytesIO
|
||||
|
||||
import fitz
|
||||
import pdfrw
|
||||
from fpdf import FPDF
|
||||
|
||||
|
||||
class PdfMaker:
|
||||
def __init__(self, size: tuple):
|
||||
self.size = size
|
||||
|
||||
self.writer = pdfrw.PdfWriter()
|
||||
|
||||
def clear(self):
|
||||
del self.writer
|
||||
self.writer = pdfrw.PdfWriter()
|
||||
|
||||
def add_image(self, image_data):
|
||||
size = self.size
|
||||
|
||||
fpdf = FPDF(format=size, unit="pt")
|
||||
width, height = self.size
|
||||
fpdf.add_page()
|
||||
fpdf.image(image_data, 0, 0, width, height)
|
||||
fpdf_reader: pdfrw.PdfReader = pdfrw.PdfReader(fdata=bytes(fpdf.output()))
|
||||
self.writer.addpage(fpdf_reader.getPage(0))
|
||||
|
||||
def add_pdf(self, pdf_data: BytesIO):
|
||||
pdf_reader = pdfrw.PdfReader(fdata=bytes(pdf_data.read()))
|
||||
self.writer.addpage(pdf_reader.getPage(0))
|
||||
|
||||
def add_pdfs(self, pdf_data: BytesIO):
|
||||
pdf_reader = pdfrw.PdfReader(fdata=bytes(pdf_data.read()))
|
||||
self.writer.addpages(pdf_reader.readpages(pdf_reader.Root))
|
||||
|
||||
def get_bytes(self):
|
||||
result_io = BytesIO()
|
||||
self.writer.write(result_io)
|
||||
result_io.seek(0)
|
||||
return result_io
|
||||
|
||||
@staticmethod
|
||||
def _get_target_rect(page: fitz.Page, target_ratio: float) -> fitz.Rect:
|
||||
original_width, original_height = page.rect.width, page.rect.height
|
||||
|
||||
if original_width / original_height > target_ratio:
|
||||
# Image is wider than target aspect ratio
|
||||
new_width = original_width
|
||||
new_height = int(original_width / target_ratio)
|
||||
else:
|
||||
# Image is taller than target aspect ratio
|
||||
new_height = original_height
|
||||
new_width = int(new_height * target_ratio)
|
||||
|
||||
return fitz.Rect(0, 0, new_width, new_height)
|
||||
|
||||
@staticmethod
|
||||
def resize_pdf_with_reportlab(input_pdf_bytesio: BytesIO) -> BytesIO:
|
||||
output_pdf = BytesIO()
|
||||
|
||||
pdf_document = fitz.open(stream=input_pdf_bytesio.getvalue(), filetype="pdf")
|
||||
|
||||
if len(pdf_document) != 1:
|
||||
raise Exception("Ошибка. В документе должна быть одна страница.")
|
||||
|
||||
page = pdf_document[0]
|
||||
target_ratio = 29 / 20
|
||||
actual_ratio = page.rect.width / page.rect.height
|
||||
|
||||
if abs(actual_ratio - target_ratio) < 0.1:
|
||||
return input_pdf_bytesio
|
||||
|
||||
rect = PdfMaker._get_target_rect(page, target_ratio)
|
||||
page.set_mediabox(rect)
|
||||
page.set_cropbox(rect)
|
||||
page.set_bleedbox(rect)
|
||||
page.set_trimbox(rect)
|
||||
|
||||
pdf_document.save(output_pdf)
|
||||
pdf_document.close()
|
||||
|
||||
output_pdf.seek(0)
|
||||
return output_pdf
|
||||
21
barcodes/types.py
Normal file
21
barcodes/types.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from typing import TypedDict
|
||||
|
||||
from models import BarcodeTemplate, Product
|
||||
|
||||
|
||||
class BarcodeData(TypedDict):
|
||||
barcode: str
|
||||
template: BarcodeTemplate
|
||||
product: Product
|
||||
num_duplicates: int
|
||||
|
||||
|
||||
class PdfBarcodeGenData(TypedDict):
|
||||
barcode_value: str
|
||||
text: str
|
||||
num_duplicates: int
|
||||
|
||||
|
||||
class PdfBarcodeImageGenData(TypedDict):
|
||||
num_duplicates: int
|
||||
barcode_image_url: str
|
||||
2
build-docker.sh
Executable file
2
build-docker.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
#!/usr/bin/env bash
# Build and publish the fulfillment-backend image to the private registry.
# BUGFIX: without `set -e` the push ran even when the build failed.
set -euo pipefail

IMAGE="git.denco.store/fakz9/fulfillment-backend:latest"

docker build -t "$IMAGE" .
docker push "$IMAGE"
|
||||
1
card_attributes/__init__.py
Normal file
1
card_attributes/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .handlers import *
|
||||
2
card_attributes/exceptions.py
Normal file
2
card_attributes/exceptions.py
Normal file
@@ -0,0 +1,2 @@
|
||||
class CardAttributeException(Exception):
    """Raised when a card attribute cannot be resolved, created or updated."""
|
||||
2
card_attributes/handlers/__init__.py
Normal file
2
card_attributes/handlers/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
from .card_attributes_query_handler import CardAttributesQueryHandler
|
||||
from .card_attributes_command_handler import CardAttributesCommandHandler
|
||||
6
card_attributes/handlers/base_handler.py
Normal file
6
card_attributes/handlers/base_handler.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
|
||||
class BaseHandler:
    """Common base for attribute handlers; holds the shared DB session."""

    def __init__(self, session: AsyncSession):
        # Async SQLAlchemy session used by every handler operation.
        self.session = session
|
||||
119
card_attributes/handlers/card_attributes_command_handler.py
Normal file
119
card_attributes/handlers/card_attributes_command_handler.py
Normal file
@@ -0,0 +1,119 @@
|
||||
import pickle
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import select, and_
|
||||
|
||||
from card_attributes.exceptions import CardAttributeException
|
||||
from card_attributes.handlers.base_handler import BaseHandler
|
||||
from models import CardAttribute, Attribute, Card, Project, Board
|
||||
from .card_attributes_query_handler import CardAttributesQueryHandler
|
||||
|
||||
|
||||
class CardAttributesCommandHandler(BaseHandler):
    """Write-side operations on card attributes (create / update values)."""

    async def _create_card_attribute(self, card_id: int, attribute_id: int, value):
        """Insert a CardAttribute row with the given value and flush it."""
        card_attribute = CardAttribute(
            card_id=card_id,
            attribute_id=attribute_id,
        )
        card_attribute.set_value(value)
        self.session.add(card_attribute)
        # Flush (not commit) so the row materialises while the caller keeps
        # transaction control.
        await self.session.flush()

    async def _set_attribute_after_creation(self, card_id: int, project_attr: Attribute, attributes: Optional[dict] = None):
        """Initialise one attribute on a freshly created card.

        Resolution order: explicitly passed value -> stored default ->
        None when the attribute is nullable.

        Raises:
            CardAttributeException: when a required attribute has neither a
                passed value nor a default.
        """
        if attributes and project_attr.name in attributes:
            passed_value = attributes[project_attr.name]
            return await self._create_card_attribute(card_id, project_attr.id, passed_value)

        if project_attr.default_value:
            # NOTE(review): default_value is a pickled blob; pickle.loads is
            # only safe if it is written exclusively by trusted code -- confirm.
            default_value = pickle.loads(project_attr.default_value)
            return await self._create_card_attribute(card_id, project_attr.id, default_value)

        if project_attr.is_nullable:
            return await self._create_card_attribute(card_id, project_attr.id, None)

        raise CardAttributeException("Required value was not provided")

    async def set_attributes_after_creation(self, card: Card, attributes: Optional[dict] = None):
        """Create a CardAttribute row for every attribute of the card's project.

        Silently does nothing when the card's board cannot be loaded.
        """
        query_handler = CardAttributesQueryHandler(self.session)
        board: Optional[Board] = await self.session.get(Board, card.board_id)
        if not board:
            return
        project_attrs = await query_handler.get_attributes_for_project(board.project_id)

        # The original wrapped this loop in `except CardAttributeException:
        # raise`, which is a no-op; the exception propagates either way.
        for project_attr in project_attrs:
            await self._set_attribute_after_creation(card.id, project_attr, attributes)

    async def _set_card_attribute(self, card_id: int, attribute_name: str, value):
        """Update a card's attribute by name, creating the row when absent.

        Raises:
            CardAttributeException: when no attribute with that name exists.
        """
        query_handler = CardAttributesQueryHandler(self.session)
        attribute = await query_handler.get_attr_by_name(attribute_name)
        if not attribute:
            raise CardAttributeException(f"Attribute [{attribute_name}] not found")

        stmt = (
            select(CardAttribute)
            .where(
                and_(
                    CardAttribute.card_id == card_id,
                    CardAttribute.attribute_id == attribute.id,
                )
            )
        )
        card_attribute: Optional[CardAttribute] = (await self.session.scalars(stmt)).one_or_none()

        if not card_attribute:
            await self._create_card_attribute(card_id, attribute.id, value)
        else:
            card_attribute.set_value(value)

    async def set_attr_for_each_card_in_group(self, group_id: int, attribute_name: str, value):
        """Apply one attribute value to every card belonging to the group."""
        query_handler = CardAttributesQueryHandler(self.session)
        card_ids: list[int] = await query_handler.get_card_ids_by_group_id(group_id)

        for card_id in card_ids:
            await self._set_card_attribute(card_id, attribute_name, value)

    async def set_attributes(self, card: Card, attributes: Optional[dict] = None):
        """Set the passed attribute values on a card (or its whole group).

        Names that do not match any project attribute are silently skipped;
        group-wide attributes fan out to every card of the card's group.
        """
        # BUGFIX: the original iterated attributes.items() unconditionally and
        # crashed with AttributeError when attributes was None (its default).
        if not attributes:
            return

        query_handler = CardAttributesQueryHandler(self.session)
        board: Optional[Board] = await self.session.get(Board, card.board_id)
        if not board:
            return
        project_attrs: list[Attribute] = await query_handler.get_attributes_for_project(board.project_id)

        for attr_name, attr_value in attributes.items():
            try:
                attr = next(attr for attr in project_attrs if attr.name == attr_name)
            except StopIteration:
                # Unknown attribute name -- ignore, matching the old behaviour.
                continue
            if attr.is_applicable_to_group and card.group:
                await self.set_attr_for_each_card_in_group(card.group.id, attr_name, attr_value)
            else:
                await self._set_card_attribute(card.id, attr_name, attr_value)

    async def set_project_attributes(self, project: Project, attribute_ids: list[int]):
        """Attach the given attributes to a project and backfill its cards."""
        query_handler = CardAttributesQueryHandler(self.session)
        attributes = await query_handler.get_attributes_by_ids(attribute_ids)

        # Only attributes not yet linked to the project need a card backfill.
        attributes_to_create = []
        for attribute in attributes:
            project_attr = await query_handler.get_project_attr(project.id, attribute.id)
            if not project_attr:
                attributes_to_create.append(attribute)

        async for card in query_handler.get_all_cards_for_project(project.id):
            await self._add_attributes_to_card(card, attributes_to_create)

        project.attributes = attributes
        await self.session.commit()

    async def _add_attributes_to_card(self, card: Card, attributes_to_create: list[Attribute]):
        """Create any missing attribute rows on one card using defaults."""
        card_attribute_ids: set[int] = {attr.attribute_id for attr in card.attributes}

        for attribute in attributes_to_create:
            if attribute.id not in card_attribute_ids:
                await self._set_attribute_after_creation(card.id, attribute)
|
||||
78
card_attributes/handlers/card_attributes_query_handler.py
Normal file
78
card_attributes/handlers/card_attributes_query_handler.py
Normal file
@@ -0,0 +1,78 @@
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import select, and_
|
||||
from sqlalchemy.ext.asyncio import AsyncResult
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from card_attributes.handlers.base_handler import BaseHandler
|
||||
from models import Attribute, project_attribute, card_relations, Card, Project, Board
|
||||
|
||||
|
||||
class CardAttributesQueryHandler(BaseHandler):
    """Read-side queries for card, project and group attributes."""

    async def get_attributes_for_project(self, project_id: int) -> list[Attribute]:
        # All non-deleted attributes linked to the project through the
        # project_attribute association table.
        stmt = (
            select(Attribute)
            .join(project_attribute, project_attribute.c.attribute_id == Attribute.id)
            .where(
                project_attribute.c.project_id == project_id,
                Attribute.is_deleted == False,
            )
        )
        attributes = (await self.session.scalars(stmt)).all()
        return list(attributes)

    async def get_attr_by_name(self, attr_name: str) -> Optional[Attribute]:
        # First non-deleted attribute with this name (None when absent),
        # with its projects relationship eagerly loaded.
        stmt = (
            select(Attribute)
            .options(
                selectinload(Attribute.projects),
            )
            .where(
                Attribute.name == attr_name,
                Attribute.is_deleted == False,
            )
        )
        attribute = (await self.session.scalars(stmt)).first()
        return attribute

    async def get_card_ids_by_group_id(self, group_id: int) -> list[int]:
        # Ids of every card related to the group via card_relations.
        stmt = (
            select(card_relations.c.card_id)
            .where(card_relations.c.group_id == group_id)
        )
        ids = await self.session.scalars(stmt)
        return list(ids)

    async def get_all_cards_for_project(self, project_id: int) -> AsyncResult[Card]:
        # Streams every card of the project in server-side batches of 100,
        # with the attributes relationship eagerly loaded.
        # NOTE(review): this is an async generator yielding Card, not an
        # AsyncResult -- the annotation should probably be AsyncIterator[Card].
        stmt = (
            select(Card)
            .join(Board)
            .join(Project)
            .where(Project.id == project_id)
            .options(selectinload(Card.attributes))
            .execution_options(yield_per=100)
        )
        rows: AsyncResult[tuple[Card]] = await self.session.stream(stmt)
        async for row in rows:
            yield row[0]

    async def get_project_attr(self, project_id: int, attribute_id: int) -> project_attribute:
        # Row of the project<->attribute association table, or None when the
        # attribute is not attached to the project.
        stmt_is_attribute_already_added = (
            select(project_attribute)
            .where(
                and_(
                    project_attribute.c.project_id == project_id,
                    project_attribute.c.attribute_id == attribute_id,
                )
            )
        )
        project_attribute_inst = await self.session.execute(stmt_is_attribute_already_added)
        return project_attribute_inst.first()

    async def get_attributes_by_ids(self, attribute_ids: list[int]) -> list[Attribute]:
        # Attributes whose ids are in the given list; note deleted attributes
        # are NOT filtered out here, unlike the queries above.
        stmt = (
            select(Attribute)
            .where(Attribute.id.in_(attribute_ids))
        )
        attributes = (await self.session.scalars(stmt)).all()
        return list(attributes)
|
||||
23
constants.py
23
constants.py
@@ -10,19 +10,27 @@ from utils.code128 import encode128
|
||||
ENV = Environment(loader=FileSystemLoader(Path("templates") / Path("documents")))
|
||||
ENV.globals['now'] = datetime.now
|
||||
ENV.globals['encode128'] = encode128
|
||||
ENV.globals['format_number'] = lambda x: '{:,}'.format(x).replace(',', ' ')
|
||||
|
||||
DOMAIN_NAME = "crm.denco.store"
|
||||
API_ROOT = "/api"
|
||||
|
||||
APP_PATH = os.path.dirname(sys.executable) if getattr(sys, 'frozen', False) else os.path.dirname(__file__)
|
||||
|
||||
STATIC_PATH = os.path.join(APP_PATH, "static")
|
||||
|
||||
KAFKA_CERTS_PATH = os.path.join(APP_PATH, "certs")
|
||||
|
||||
allowed_telegram_ids = [
|
||||
7532624817, # Me
|
||||
816217667, # Igor
|
||||
6671635397, # Nikita
|
||||
355308397, # SerGey
|
||||
5734685107, # Seller manager,
|
||||
355242295, # Dsnonchik
|
||||
651158209, # kristina
|
||||
502869937, # Sasha
|
||||
7326211785
|
||||
1006239222, # Sasha Serbin
|
||||
7326211785,
|
||||
6427522679, # Ekaterina Manager
|
||||
5734685107 # Kristina v2
|
||||
]
|
||||
|
||||
MONTHS = (
|
||||
@@ -49,3 +57,10 @@ DEAL_STATUS_STR = [
|
||||
'Завершено',
|
||||
'Отменено',
|
||||
]
|
||||
|
||||
# 30 minutes in seconds
|
||||
INVITE_CODE_EXPIRY = 30 * 60
|
||||
|
||||
|
||||
DEAL_EDITOR = 'deal_editor'
|
||||
DEALS_VIEWER = 'deals_viewer'
|
||||
|
||||
13
enums/profit_table_group_by.py
Normal file
13
enums/profit_table_group_by.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from enum import IntEnum

# Dimension the profit table can be grouped by; the integer is the wire value
# used by the API, numbered consecutively from zero.
_GROUPINGS = (
    'BY_DATES',
    'BY_CLIENTS',
    'BY_PROJECTS',
    'BY_BOARDS',
    'BY_STATUSES',
    'BY_WAREHOUSES',
    'BY_MARKETPLACES',
    'BY_MANAGERS',
    'BY_TAGS',
)

ProfitTableGroupBy = IntEnum('ProfitTableGroupBy', _GROUPINGS, start=0)
|
||||
3
external/billing/billing_client.py
vendored
3
external/billing/billing_client.py
vendored
@@ -1,5 +1,6 @@
|
||||
import aiohttp
|
||||
|
||||
from backend.config import BILLING_URL
|
||||
from .schemas import *
|
||||
|
||||
|
||||
@@ -9,7 +10,7 @@ class BillingClient:
|
||||
self.headers = {
|
||||
'Authorization': f'Bearer {self.api_key}'
|
||||
}
|
||||
self.base_url = 'https://billing.denco.store'
|
||||
self.base_url = BILLING_URL
|
||||
|
||||
async def _method(self, http_method, method, **kwargs):
|
||||
async with aiohttp.ClientSession(headers=self.headers) as session:
|
||||
|
||||
28
external/billing/schemas.py
vendored
28
external/billing/schemas.py
vendored
@@ -1,5 +1,5 @@
|
||||
import re
|
||||
from typing import List
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import field_validator
|
||||
|
||||
@@ -25,7 +25,7 @@ class CreateBillRequestItems(BaseSchema):
|
||||
|
||||
|
||||
class CreateBillRequestSchema(BaseSchema):
|
||||
listener_transaction_id: int
|
||||
listener_transaction_id: int | str
|
||||
payer_name: str
|
||||
payer_inn: str
|
||||
payer_phone: str | None
|
||||
@@ -35,7 +35,8 @@ class CreateBillRequestSchema(BaseSchema):
|
||||
def payer_phone_validator(cls, phone: str) -> str:
|
||||
if phone is None:
|
||||
return None
|
||||
|
||||
phone = phone.replace('-', '')
|
||||
phone = phone.replace(' ', '')
|
||||
if not phone.startswith("+"):
|
||||
phone = f"+{phone}"
|
||||
|
||||
@@ -46,7 +47,7 @@ class CreateBillRequestSchema(BaseSchema):
|
||||
|
||||
|
||||
class DeleteBillRequestSchema(BaseSchema):
|
||||
listener_transaction_id: int
|
||||
listener_transaction_id: int | str
|
||||
|
||||
|
||||
class DeleteBillResponseSchema(BaseSchema):
|
||||
@@ -54,14 +55,14 @@ class DeleteBillResponseSchema(BaseSchema):
|
||||
|
||||
|
||||
class NotifyReceivedBillRequestSchema(BaseSchema):
|
||||
listener_transaction_id: int
|
||||
listener_transaction_id: int | str
|
||||
channel: NotificationChannel
|
||||
received: bool
|
||||
|
||||
|
||||
class CreateBillingResponseSchema(BaseSchema):
|
||||
ok: bool
|
||||
|
||||
message:Optional[str] = None
|
||||
|
||||
class NotifyReceivedBillResponseSchema(BaseSchema):
|
||||
ok: bool
|
||||
@@ -84,6 +85,19 @@ class BillPaymentStatus(BaseSchema):
|
||||
|
||||
|
||||
class BillStatusUpdateRequest(BaseSchema):
|
||||
listener_transaction_id: int
|
||||
listener_transaction_id: int | str
|
||||
channel: NotificationChannel
|
||||
info: BillPaymentInfo | BillPaymentStatus
|
||||
|
||||
|
||||
class ServiceBillingDocumentPdf(BaseSchema):
|
||||
name: str = ""
|
||||
price: int
|
||||
quantity: int
|
||||
|
||||
|
||||
class ProductBillingDocumentPdf(BaseSchema):
|
||||
article: str = ""
|
||||
size: str = ""
|
||||
price: int
|
||||
quantity: int
|
||||
|
||||
0
external/chat/__init__.py
vendored
Normal file
0
external/chat/__init__.py
vendored
Normal file
55
external/chat/chat_client.py
vendored
Normal file
55
external/chat/chat_client.py
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
import aiohttp
|
||||
import jwt
|
||||
from fastapi import UploadFile
|
||||
|
||||
from backend.config import CHATS_SYNC_URL, CHAT_CONNECTOR_API_KEY
|
||||
from external.chat.schemas import *
|
||||
from services.auth import algorithm
|
||||
|
||||
|
||||
class ChatClient:
    """HTTP client for the chat-connector service (group/topic management and
    file-message relay)."""

    def __init__(self, api_key: str):
        self.api_key = api_key
        self.headers = {
            'Authorization': 'Bearer ' + self.create_jwt_token()
        }
        self.base_url = CHATS_SYNC_URL
        self.chats_sync_endpoint = '/chats-sync'
        self.groups_endpoint = '/group'

    def create_jwt_token(self):
        """Sign a JWT whose subject is our api key with the shared connector secret."""
        return jwt.encode({'sub': self.api_key}, CHAT_CONNECTOR_API_KEY, algorithm=algorithm)

    async def _method(self, http_method, method, **kwargs):
        """Perform one request against base_url + method; returns the JSON body."""
        async with aiohttp.ClientSession(headers=self.headers) as session:
            async with session.request(http_method, self.base_url + method, **kwargs) as response:
                return await response.json()

    async def create_group(self, request: ExternalCreateGroupRequest) -> ExternalCreateGroupResponse:
        """Create a Telegram group via the connector."""
        json_data = request.model_dump()
        response = await self._method('POST', self.groups_endpoint + '/create', json=json_data)
        return ExternalCreateGroupResponse.model_validate(response)

    async def create_topic(self, request: ExternalCreateTopicRequest) -> ExternalCreateTopicResponse:
        """Create a topic inside an existing group via the connector."""
        json_data = request.model_dump()
        response = await self._method('POST', self.groups_endpoint + '/topic/create', json=json_data)
        return ExternalCreateTopicResponse.model_validate(response)

    async def send_messages_with_files(
        self,
        tg_group_id: str,
        tg_topic_id: int,
        caption: str,
        files: list[UploadFile],
    ) -> ExternalSendMessagesWithFilesResponse:
        """Send a captioned batch of files to a group topic as multipart form data."""
        from urllib.parse import urlencode

        # BUGFIX: the caption used to be interpolated into the URL unescaped,
        # so spaces, '&', '#', '=' etc. corrupted the query string; encode
        # every parameter properly.
        query_params = '?' + urlencode({
            'tg_group_id': tg_group_id,
            'tg_topic_id': tg_topic_id,
            'caption': caption,
        })

        data = aiohttp.FormData(default_to_multipart=True)

        for file in files:
            content = await file.read()
            data.add_field('files', content, filename=file.filename, content_type=file.content_type)

        response = await self._method('POST', self.chats_sync_endpoint + '/send' + query_params, data=data)

        return ExternalSendMessagesWithFilesResponse.model_validate(response)
|
||||
47
external/chat/schemas.py
vendored
Normal file
47
external/chat/schemas.py
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
from typing import Optional
from uuid import UUID

from schemas.base import BaseSchema, OkMessageSchema


# region Entities

class ExternalMessageFileSchema(BaseSchema):
    """File metadata echoed back by the connector after an upload."""
    file_path: str
    type: str
    file_name: str
    file_size: int


# endregion

# region Requests

class ExternalCreateGroupRequest(BaseSchema):
    """Ask the connector to create a Telegram group with this title."""
    title: str


class ExternalCreateTopicRequest(BaseSchema):
    """Ask the connector to create a topic inside an existing group."""
    group_id: str
    title: str
    icon_emoji_id: Optional[int] = None  # custom emoji id for the topic icon


# endregion

# region Responses

class ExternalCreateGroupResponse(BaseSchema):
    """Identifiers of a freshly created group."""
    tg_group_id: int
    group_id: UUID  # connector-side id, used in later sync calls
    tg_invite_link: str


class ExternalCreateTopicResponse(BaseSchema):
    """Identifier of a freshly created topic."""
    tg_topic_id: int


class ExternalSendMessagesWithFilesResponse(OkMessageSchema):
    """Result of a file-batch send; lists the files as stored."""
    files: list[ExternalMessageFileSchema]

# endregion
|
||||
1
external/kafka/__init__.py
vendored
Normal file
1
external/kafka/__init__.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
from .consumer import consume_messages
|
||||
31
external/kafka/consumer.py
vendored
Normal file
31
external/kafka/consumer.py
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
from aiokafka import AIOKafkaConsumer
|
||||
from aiokafka.errors import KafkaConnectionError
|
||||
|
||||
from backend.config import KAFKA_URL, KAFKA_CONSUMER_TOPIC, KAFKA_ENABLE_SSL
|
||||
from backend.session import session_maker
|
||||
from external.kafka.context import context
|
||||
from external.kafka.services.consumer_service import ConsumerService
|
||||
|
||||
|
||||
async def consume_messages():
    """Long-running Kafka consumer loop for the CRM topic.

    Connects to KAFKA_CONSUMER_TOPIC (over SSL when enabled in config) and
    feeds every record to ConsumerService; the consumer is stopped when the
    loop exits. A failed connection at startup is logged and the coroutine
    returns instead of crashing the caller.
    """
    consumer = AIOKafkaConsumer(
        KAFKA_CONSUMER_TOPIC,
        bootstrap_servers=KAFKA_URL,
        group_id='crm',
        # SSL context comes from external.kafka.context; plaintext otherwise.
        security_protocol='SSL' if KAFKA_ENABLE_SSL else 'PLAINTEXT',
        ssl_context=context if KAFKA_ENABLE_SSL else None,
    )
    try:
        await consumer.start()
    except KafkaConnectionError as e:
        # Best-effort startup: log and bail out.
        print(e)
        return

    # NOTE(review): one DB session lives for the whole consumer lifetime --
    # confirm that is intended for a long-running loop.
    async with session_maker() as session:
        consumer_service = ConsumerService(session)

        try:
            async for message in consumer:
                await consumer_service.consume_message(message)
        finally:
            # Always release the Kafka consumer, even on cancellation/error.
            await consumer.stop()
|
||||
11
external/kafka/context.py
vendored
Normal file
11
external/kafka/context.py
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
from pathlib import Path

from aiokafka.helpers import create_ssl_context

from constants import KAFKA_CERTS_PATH

# Shared SSL context for all Kafka clients, built from the local cert bundle
# shipped in KAFKA_CERTS_PATH.
_certs_dir = Path(KAFKA_CERTS_PATH)

context = create_ssl_context(
    cafile=_certs_dir / 'ca-cert',
    certfile=_certs_dir / 'cert-signed',
    keyfile=_certs_dir / 'cert-key',
)
|
||||
7
external/kafka/enums.py
vendored
Normal file
7
external/kafka/enums.py
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
import enum

# Discriminator for messages exchanged with the chat connector over Kafka;
# members are auto-numbered 1, 2, 3 exactly as in the class-based original.
KafkaMessageType = enum.Enum('KafkaMessageType', ['SEND', 'EDIT', 'DELETE'])
||||
22
external/kafka/producer.py
vendored
Normal file
22
external/kafka/producer.py
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
from typing import Optional

from aiokafka import AIOKafkaProducer

from backend.config import KAFKA_URL, KAFKA_ENABLE_SSL
from external.kafka.context import context

# Module-level singleton producer; populated once by init_producer().
_producer: Optional[AIOKafkaProducer] = None


async def init_producer():
    """Create the module-wide Kafka producer (it is not started here)."""
    global _producer
    _producer = AIOKafkaProducer(
        bootstrap_servers=KAFKA_URL,
        security_protocol='SSL' if KAFKA_ENABLE_SSL else 'PLAINTEXT',
        ssl_context=context if KAFKA_ENABLE_SSL else None,
    )


async def get_producer() -> Optional[AIOKafkaProducer]:
    """Return the shared producer, or None when init_producer() has not run."""
    # Reading a module global needs no `global` declaration.
    return _producer
|
||||
58
external/kafka/schemas/consumer.py
vendored
Normal file
58
external/kafka/schemas/consumer.py
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
from typing import Optional

from schemas.base import OkMessageSchema, BaseSchema


# region Entities

class TelegramUserSchema(BaseSchema):
    """Telegram account of a message sender."""
    id: int
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    username: Optional[str] = None


class MessageFileSchema(BaseSchema):
    """File attached to an incoming Telegram message."""
    file_path: str
    type: str
    file_name: str
    file_size: int


class MessageFromTelegramSchema(BaseSchema):
    """One message received from Telegram through the connector."""
    group_id: str
    tg_topic_id: int
    text: Optional[str]
    sender: TelegramUserSchema
    file: Optional[MessageFileSchema]


# endregion

# region Requests

class MessageFromTelegramRequest(BaseSchema):
    """Envelope for a client message pushed to the CRM by the connector."""
    message: MessageFromTelegramSchema


# endregion

# region Responses

class BaseConnectorResponse(OkMessageSchema):
    """Base for connector acknowledgements of CRM-originated actions."""
    # Wire value of KafkaMessageType (SEND / EDIT / DELETE).
    message_type: int
    message_id: int


class SendMessageToConnectorResponse(BaseConnectorResponse):
    """Ack for a sent message; carries the Telegram message id on success."""
    tg_message_id: Optional[int] = None


class DeleteMessageResponse(BaseConnectorResponse):
    """Ack for a message deletion."""
    pass


class EditMessageResponse(BaseConnectorResponse):
    """Ack for a message edit; echoes the final text."""
    text: str

# endregion
|
||||
43
external/kafka/schemas/producer.py
vendored
Normal file
43
external/kafka/schemas/producer.py
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
from typing import Optional

from schemas.base import BaseSchema


# region Entities

class BaseMessageSchema(BaseSchema):
    """Minimal message reference shared by all connector payloads."""
    message_id: int
    tg_message_id: Optional[int] = None
    group_id: str


class EditMessageSchema(BaseMessageSchema):
    """Message reference plus the replacement text for an edit."""
    text: str


class MessageSchema(EditMessageSchema):
    """Full payload for sending a new message into a group topic."""
    topic_id: int


# endregion

# region Requests

class BaseConnectorRequest(BaseSchema):
    """Base envelope sent to the chat connector over Kafka."""
    # Wire value of KafkaMessageType (SEND / EDIT / DELETE).
    message_type: int
    app_auth_key: str
    message: BaseMessageSchema


class SendMessageToConnectorRequest(BaseConnectorRequest):
    """SEND request: deliver a new message."""
    message: MessageSchema


class SendMessageDeletingToConnectorRequest(BaseConnectorRequest):
    """DELETE request: remove an existing message."""
    pass


class SendMessageEditingToConnectorRequest(BaseConnectorRequest):
    """EDIT request: replace an existing message's text."""
    message: EditMessageSchema

# endregion
|
||||
107
external/kafka/services/consumer_service.py
vendored
Normal file
107
external/kafka/services/consumer_service.py
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
import pickle
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from aiokafka import ConsumerRecord
|
||||
from sqlalchemy import select
|
||||
|
||||
from external.kafka.enums import KafkaMessageType
|
||||
from external.kafka.schemas.consumer import *
|
||||
from models import Message, MessageStatus, TgUser, Chat, TgGroup, MessageFile
|
||||
from services.base import BaseService
|
||||
|
||||
|
||||
class ConsumerService(BaseService):
    """Handles records read from the CRM Kafka topic.

    Two payload kinds arrive: connector acknowledgements (dicts containing an
    'ok' key) and client messages originating from Telegram.
    """

    async def consume_message(self, message: ConsumerRecord):
        """Dispatch one Kafka record; any error is logged and swallowed so a
        bad record cannot kill the consumer loop."""
        # SECURITY NOTE(review): pickle.loads on data read from Kafka executes
        # arbitrary code if the topic is writable by untrusted producers --
        # confirm the topic ACLs, or switch to JSON.
        value = pickle.loads(message.value)

        try:
            if 'ok' in value:
                """ Received response after sending message from crm """
                await self._process_connector_response(message)
            else:
                """ Received message from client """
                request = MessageFromTelegramRequest.model_validate(value)
                await self._receive_message_from_client(request)
        except Exception as e:
            print(e)

    async def _process_connector_response(self, message: ConsumerRecord):
        """Route a connector acknowledgement by its message_type."""
        # NOTE(review): the record is unpickled a second time here (the caller
        # already decoded it); consider passing the decoded value instead.
        value = pickle.loads(message.value)
        message_type = KafkaMessageType(value['message_type'])

        match message_type:
            case KafkaMessageType.SEND:
                response = SendMessageToConnectorResponse.model_validate(value)
                await self._process_connector_send_response(response)
            case KafkaMessageType.EDIT:
                response = EditMessageResponse.model_validate(value)
                await self._process_connector_edit_response(response)
            case KafkaMessageType.DELETE:
                response = DeleteMessageResponse.model_validate(value)
                await self._process_connector_delete_response(response)
            case _:
                raise Exception('Unexpected message type in crm consumer')

    async def _process_connector_send_response(self, response: SendMessageToConnectorResponse):
        """Record the Telegram message id and final status of a sent message."""
        message = await self.session.get(Message, response.message_id)
        message.tg_message_id = response.tg_message_id
        if response.ok:
            message.status = MessageStatus.success
        else:
            message.status = MessageStatus.error

        await self.session.commit()

    async def _process_connector_delete_response(self, response: DeleteMessageResponse):
        """Mark the message deleted once the connector confirms the deletion."""
        if not response.ok:
            return

        message = await self.session.get(Message, response.message_id)
        message.is_deleted = True
        await self.session.commit()

    async def _process_connector_edit_response(self, response: EditMessageResponse):
        """Store the edited text once the connector confirms the edit."""
        if not response.ok:
            return

        message = await self.session.get(Message, response.message_id)
        message.text = response.text
        message.is_edited = True
        await self.session.commit()

    async def _get_chat(self, group_id: str, tg_topic_id: int) -> Optional[Chat]:
        """Find the CRM chat bound to this Telegram group/topic pair, if any."""
        stmt = (
            select(Chat)
            .join(TgGroup)
            .where(
                Chat.tg_topic_id == tg_topic_id,
                TgGroup.id == UUID(group_id),
            )
        )
        return (await self.session.scalars(stmt)).one_or_none()

    async def _receive_message_from_client(self, request: MessageFromTelegramRequest):
        """Persist an incoming Telegram message (sender, optional file, text)."""
        # Create the Telegram user record on first contact.
        tg_sender: Optional[TgUser] = await self.session.get(TgUser, request.message.sender.id)
        if not tg_sender:
            tg_sender: TgUser = TgUser(**request.message.sender.model_dump())
            self.session.add(tg_sender)

        # Messages for unknown group/topic pairs are dropped silently.
        chat = await self._get_chat(request.message.group_id, request.message.tg_topic_id)
        if not chat:
            return

        file = None
        if request.message.file:
            file = MessageFile(**request.message.file.model_dump())

        message = Message(
            text=request.message.text if request.message.text else "",
            created_at=datetime.now(),
            tg_sender_id=tg_sender.id,
            chat_id=chat.id,
            status=MessageStatus.success,
            file=file,
        )
        self.session.add(message)
        await self.session.commit()
|
||||
78
external/kafka/services/producer_service.py
vendored
Normal file
78
external/kafka/services/producer_service.py
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
import pickle
|
||||
from uuid import UUID
|
||||
|
||||
from aiohttp import ClientConnectorError
|
||||
|
||||
from backend.config import KAFKA_PRODUCER_TOPIC, CHAT_CONNECTOR_API_KEY
|
||||
from external.kafka.enums import KafkaMessageType
|
||||
from external.kafka.producer import get_producer
|
||||
from external.kafka.schemas.producer import *
|
||||
from services.base import BaseService
|
||||
|
||||
|
||||
class ProducerService(BaseService):
    """Publishes chat actions (send / edit / delete) to the connector's Kafka topic."""

    @staticmethod
    async def _send_message(request: BaseConnectorRequest):
        """Pickle the request and publish it; returns (ok, human-readable message)."""
        try:
            # NOTE(review): get_producer() returns None until init_producer()
            # has run; that would surface here as an AttributeError caught by
            # the generic except below.
            producer = await get_producer()
            await producer.send(KAFKA_PRODUCER_TOPIC, value=pickle.dumps(request.model_dump()))
        except ClientConnectorError:
            # NOTE(review): aiokafka does not raise aiohttp's
            # ClientConnectorError -- this branch looks unreachable; verify.
            return False, 'Ошибка подключения к сервису'
        except Exception as e:
            return False, str(e)
        return True, 'Сообщение отправлено'

    @staticmethod
    async def send_message_to_connector(
            message_text: str,
            group_id: UUID,
            topic_id: int,
            message_id: int,
    ) -> tuple[bool, str]:
        """Publish a SEND request for a new message in the given group topic."""
        request = SendMessageToConnectorRequest(
            message_type=KafkaMessageType.SEND,
            app_auth_key=CHAT_CONNECTOR_API_KEY,
            message=MessageSchema(
                message_id=message_id,
                text=message_text,
                group_id=str(group_id),
                topic_id=topic_id,
            ),
        )
        return await ProducerService._send_message(request)

    @staticmethod
    async def send_message_deleting_to_connector(
            message_id: int,
            tg_message_id: int,
            group_id: UUID,
    ) -> tuple[bool, str]:
        """Publish a DELETE request for an already-delivered message."""
        request = SendMessageDeletingToConnectorRequest(
            message_type=KafkaMessageType.DELETE,
            app_auth_key=CHAT_CONNECTOR_API_KEY,
            message=BaseMessageSchema(
                message_id=message_id,
                tg_message_id=tg_message_id,
                group_id=str(group_id),
            ),
        )
        return await ProducerService._send_message(request)

    @staticmethod
    async def send_message_editing_to_connector(
            message_id: int,
            tg_message_id: int,
            group_id: UUID,
            text: str,
    ) -> tuple[bool, str]:
        """Publish an EDIT request replacing the text of a delivered message."""
        request = SendMessageEditingToConnectorRequest(
            message_type=KafkaMessageType.EDIT,
            app_auth_key=CHAT_CONNECTOR_API_KEY,
            message=EditMessageSchema(
                message_id=message_id,
                tg_message_id=tg_message_id,
                group_id=str(group_id),
                text=text,
            ),
        )
        return await ProducerService._send_message(request)
|
||||
8
external/marketplace/ozon/core.py
vendored
8
external/marketplace/ozon/core.py
vendored
@@ -25,7 +25,7 @@ class OzonMarketplaceApi(BaseMarketplaceApi):
|
||||
return "https://api-seller.ozon.ru"
|
||||
|
||||
async def get_products(self, data: dict) -> dict:
|
||||
method = '/v2/product/list'
|
||||
method = '/v3/product/list'
|
||||
response = await self._method('POST', method, json=data)
|
||||
return response
|
||||
|
||||
@@ -36,7 +36,9 @@ class OzonMarketplaceApi(BaseMarketplaceApi):
|
||||
data = {
|
||||
'limit': limit,
|
||||
'last_id': last_id,
|
||||
|
||||
'filter':{
|
||||
'visibility': 'ALL',
|
||||
}
|
||||
}
|
||||
response = await self.get_products(data)
|
||||
if not response:
|
||||
@@ -54,7 +56,7 @@ class OzonMarketplaceApi(BaseMarketplaceApi):
|
||||
break
|
||||
|
||||
async def get_products_info(self, data: dict) -> dict:
|
||||
method = '/v2/product/info/list'
|
||||
method = '/v3/product/info/list'
|
||||
response = await self._method('POST', method, json=data)
|
||||
return response
|
||||
|
||||
|
||||
9
external/marketplace/wildberries/core.py
vendored
9
external/marketplace/wildberries/core.py
vendored
@@ -2,8 +2,7 @@ import time
|
||||
from enum import StrEnum
|
||||
from typing import AsyncIterator
|
||||
|
||||
from async_timeout import timeout
|
||||
|
||||
from backend.config import WB_SECRET_TOKEN
|
||||
from external.marketplace.base.core import BaseMarketplaceApi
|
||||
from models import Marketplace
|
||||
|
||||
@@ -34,7 +33,11 @@ class WildberriesMarketplaceApi(BaseMarketplaceApi):
|
||||
"Please check the marketplace credentials."
|
||||
)
|
||||
self.token = token
|
||||
self.headers = {'Authorization': token}
|
||||
self.headers = {
|
||||
'Authorization': token,
|
||||
'User-Agent': 'wbas_seller.denco.store3547',
|
||||
'X-Client-Secret': WB_SECRET_TOKEN
|
||||
}
|
||||
self.marketplace = marketplace
|
||||
|
||||
@property
|
||||
|
||||
119
external/marketplace/yandex/core.py
vendored
119
external/marketplace/yandex/core.py
vendored
@@ -1,15 +1,128 @@
|
||||
from typing import AsyncIterator, Optional
|
||||
|
||||
from external.marketplace.base.core import BaseMarketplaceApi
|
||||
from models import Marketplace
|
||||
from utils.list_utils import chunk_list
|
||||
|
||||
|
||||
class YandexMarketplaceApi(BaseMarketplaceApi):
|
||||
def __init__(self, marketplace: Marketplace):
|
||||
pass
|
||||
token = marketplace.auth_data.get('Api-Key')
|
||||
if not token:
|
||||
raise ValueError(
|
||||
f"Authorization token is missing for Marketplace ID: {marketplace.id}. "
|
||||
"Please check the marketplace credentials."
|
||||
)
|
||||
self.token = token
|
||||
self.headers = {'Api-Key': token}
|
||||
self.marketplace = marketplace
|
||||
|
||||
@property
|
||||
def get_headers(self) -> dict:
|
||||
return {}
|
||||
return self.headers
|
||||
|
||||
@property
|
||||
def base_url(self) -> str:
|
||||
return ""
|
||||
return 'https://api.partner.market.yandex.ru'
|
||||
|
||||
def _get_campaign_id(self) -> Optional[int]:
|
||||
campaign_id: Optional[str] = self.marketplace.auth_data.get('CampaignId')
|
||||
if not campaign_id:
|
||||
return None
|
||||
if not str(campaign_id).isdigit():
|
||||
return None
|
||||
return int(campaign_id)
|
||||
|
||||
async def get_campaigns(self) -> AsyncIterator[dict]:
|
||||
method = '/campaigns'
|
||||
page = 1
|
||||
while True:
|
||||
params = {
|
||||
'page': page,
|
||||
}
|
||||
response = await self._method('GET', method, params=params)
|
||||
if not response:
|
||||
break
|
||||
campaigns = response.get('campaigns')
|
||||
if not campaigns:
|
||||
break
|
||||
for campaign in campaigns:
|
||||
yield campaign
|
||||
|
||||
pager = response.get('pager')
|
||||
if not pager:
|
||||
break
|
||||
pages_count = pager.get('pagesCount')
|
||||
if not pages_count:
|
||||
break
|
||||
if page >= pages_count:
|
||||
break
|
||||
page += 1
|
||||
|
||||
async def get_business_id(self, campaign_id: int) -> Optional[int]:
|
||||
async for campaign in self.get_campaigns():
|
||||
if campaign['id'] == campaign_id:
|
||||
return campaign['business']['id']
|
||||
return None
|
||||
|
||||
async def get_all_products(self) -> AsyncIterator[dict]:
|
||||
campaign_id = self._get_campaign_id()
|
||||
if not campaign_id:
|
||||
return
|
||||
business_id = await self.get_business_id(campaign_id)
|
||||
if not business_id:
|
||||
return
|
||||
|
||||
method = f'/businesses/{business_id}/offer-mappings'
|
||||
limit = 200
|
||||
page_token = ''
|
||||
while True:
|
||||
params = {
|
||||
'limit': limit,
|
||||
'page_token': page_token,
|
||||
}
|
||||
response = await self._method('POST', method, params=params)
|
||||
if not response:
|
||||
break
|
||||
response = response.get('result')
|
||||
if not response:
|
||||
break
|
||||
offers = response.get('offerMappings')
|
||||
if not offers:
|
||||
break
|
||||
for offer in offers:
|
||||
yield offer
|
||||
paging = response.get('paging')
|
||||
if not paging:
|
||||
break
|
||||
|
||||
next_page_token = paging.get('nextPageToken')
|
||||
if not next_page_token:
|
||||
break
|
||||
page_token = next_page_token
|
||||
|
||||
async def get_products_by_offer_ids(self, offer_ids: list[str]) -> AsyncIterator[dict]:
|
||||
campaign_id = self._get_campaign_id()
|
||||
if not campaign_id:
|
||||
return
|
||||
business_id = await self.get_business_id(campaign_id)
|
||||
if not business_id:
|
||||
return
|
||||
|
||||
method = f'/businesses/{business_id}/offer-mappings'
|
||||
limit = 200
|
||||
for chunk in chunk_list(offer_ids, limit):
|
||||
params = {
|
||||
'offer_ids': chunk,
|
||||
}
|
||||
response = await self._method('POST', method, params=params)
|
||||
if not response:
|
||||
break
|
||||
response = response.get('result')
|
||||
if not response:
|
||||
break
|
||||
offers = response.get('offerMappings')
|
||||
if not offers:
|
||||
break
|
||||
for offer in offers:
|
||||
yield offer
|
||||
|
||||
0
generators/__init__.py
Normal file
0
generators/__init__.py
Normal file
@@ -0,0 +1,96 @@
|
||||
import os
|
||||
from io import BytesIO
|
||||
|
||||
from pdfrw import PdfReader, PdfWriter, PageMerge
|
||||
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
|
||||
from reportlab.lib.units import mm
|
||||
from reportlab.pdfbase import pdfmetrics
|
||||
from reportlab.pdfbase.ttfonts import TTFont
|
||||
from reportlab.platypus import SimpleDocTemplate
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from constants import APP_PATH
|
||||
|
||||
|
||||
class BasePdfCardGenerator:
|
||||
def __init__(self, session: AsyncSession):
|
||||
self._session = session
|
||||
assets_folder = os.path.join(APP_PATH, 'assets')
|
||||
fonts_folder = os.path.join(assets_folder, 'fonts')
|
||||
|
||||
dejavu_font_file_path = os.path.join(fonts_folder, 'DejaVuSans.ttf')
|
||||
pdfmetrics.registerFont(TTFont('DejaVuSans', dejavu_font_file_path))
|
||||
|
||||
arial_font_file_path = os.path.join(fonts_folder, 'Arial Nova Cond.ttf')
|
||||
pdfmetrics.registerFont(TTFont('Arial Nova Cond', arial_font_file_path))
|
||||
|
||||
self.page_width = 58 * mm
|
||||
self.page_height = 40 * mm
|
||||
|
||||
self.styles = getSampleStyleSheet()
|
||||
self._set_small_paragraph_styles()
|
||||
self._set_medium_paragraph_styles()
|
||||
|
||||
def _set_small_paragraph_styles(self):
|
||||
common_paragraph_style = {
|
||||
"parent": self.styles['Normal'],
|
||||
"fontName": "DejaVuSans",
|
||||
"spaceAfter": 4,
|
||||
"fontSize": 9,
|
||||
}
|
||||
|
||||
self.small_style = ParagraphStyle(
|
||||
'Small',
|
||||
alignment=0,
|
||||
**common_paragraph_style,
|
||||
)
|
||||
|
||||
self.small_centered_style = ParagraphStyle(
|
||||
'SmallCentered',
|
||||
alignment=1,
|
||||
**common_paragraph_style,
|
||||
)
|
||||
|
||||
def _set_medium_paragraph_styles(self):
|
||||
self.medium_style = ParagraphStyle(
|
||||
'Medium',
|
||||
parent=self.styles['Normal'],
|
||||
fontName="DejaVuSans",
|
||||
spaceAfter=6,
|
||||
fontSize=12,
|
||||
alignment=0,
|
||||
)
|
||||
|
||||
def _create_doc(self, buffer):
|
||||
return SimpleDocTemplate(
|
||||
buffer,
|
||||
pagesize=(self.page_width, self.page_height),
|
||||
rightMargin=1,
|
||||
leftMargin=1,
|
||||
topMargin=1,
|
||||
bottomMargin=1
|
||||
)
|
||||
|
||||
def _get_paragraph_style(self, font_size: int, font_name: str = "Arial Nova Cond"):
|
||||
common_paragraph_style = {
|
||||
"fontName": font_name,
|
||||
"fontSize": font_size,
|
||||
}
|
||||
return ParagraphStyle(
|
||||
'LargeCentered',
|
||||
alignment=1,
|
||||
**common_paragraph_style,
|
||||
)
|
||||
|
||||
def _rotate_pdf(self, buffer: BytesIO, rotation: str = 90) -> BytesIO:
|
||||
reader = PdfReader(buffer)
|
||||
writer = PdfWriter()
|
||||
|
||||
for page in reader.pages:
|
||||
rotated_page = PageMerge().add(page, rotate=rotation).render()
|
||||
writer.addpage(rotated_page)
|
||||
|
||||
rotated_buffer = BytesIO()
|
||||
writer.write(rotated_buffer)
|
||||
rotated_buffer.seek(0)
|
||||
return rotated_buffer
|
||||
0
generators/deal_pdf_generator/__init__.py
Normal file
0
generators/deal_pdf_generator/__init__.py
Normal file
25
generators/deal_pdf_generator/deal_data.py
Normal file
25
generators/deal_pdf_generator/deal_data.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from typing import TypedDict, List, Dict, Tuple, Optional
|
||||
|
||||
from models import CardProduct, Card, CardStatusHistory
|
||||
|
||||
|
||||
class DealTechSpecProductData(TypedDict):
|
||||
deal: Card
|
||||
last_status: CardStatusHistory
|
||||
total_one_product: int
|
||||
quantity: int
|
||||
additional_info: Optional[str]
|
||||
|
||||
# Поле для группировки товаров с одним артикулом и вывода таблицы [Штрихкод, Размер, Кол-во, Короба]
|
||||
deal_products: List[CardProduct]
|
||||
|
||||
# Поле для группировки товаров из нескольких сделок и вывода таблицы [Склад отгрузки, Кол-во]
|
||||
warehouses_and_quantities: List[Tuple[str, int]]
|
||||
|
||||
|
||||
class DealTechSpecData(TypedDict):
|
||||
deals: List[Card]
|
||||
products: Dict[str, DealTechSpecProductData]
|
||||
product_images: Tuple
|
||||
deal_ids_header: str
|
||||
deal_status_str: list[str]
|
||||
142
generators/deal_pdf_generator/generator.py
Normal file
142
generators/deal_pdf_generator/generator.py
Normal file
@@ -0,0 +1,142 @@
|
||||
from io import BytesIO
|
||||
from typing import List, Dict, Optional
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload, joinedload
|
||||
from weasyprint import HTML, CSS
|
||||
|
||||
from constants import DEAL_STATUS_STR, ENV, APP_PATH
|
||||
from generators.deal_pdf_generator.deal_data import DealTechSpecProductData, DealTechSpecData
|
||||
from models import Card, CardProduct, CardService as DealServiceModel, Product, CardGroup
|
||||
from services.card_group import CardGroupService
|
||||
from utils.images_fetcher import fetch_images
|
||||
|
||||
|
||||
# Генерация ключа для группировки deal_product по артикулу и услугам
|
||||
def _gen_key_for_product(deal_product: CardProduct) -> str:
|
||||
return f"{deal_product.product.article} - " + ",".join(
|
||||
str(service.service_id) for service in deal_product.services
|
||||
)
|
||||
|
||||
|
||||
# Генерация ключа для группировки deal_product из группы сделок по артикулу, сервисам, а также товарам
|
||||
def _regen_key_for_product(product_data: DealTechSpecProductData) -> str:
|
||||
if len(product_data['deal_products']) == 0:
|
||||
return ""
|
||||
|
||||
article = product_data['deal_products'][0].product.article
|
||||
services_ids = ",".join(str(service.service_id) for service in product_data['deal_products'][0].services)
|
||||
|
||||
if len(product_data['deal_products']) == 1:
|
||||
products = product_data['deal_products'][0].product_id
|
||||
else:
|
||||
products = ",".join(
|
||||
f"{deal_product.product_id}-{deal_product.quantity}" for deal_product in product_data['deal_products']
|
||||
)
|
||||
|
||||
return f"{article}+{services_ids}+{products}"
|
||||
|
||||
|
||||
class DealTechSpecPdfGenerator:
|
||||
def __init__(self, session: AsyncSession):
|
||||
self._session = session
|
||||
self.deal_doc: DealTechSpecData = {
|
||||
"deals": [],
|
||||
"products": {},
|
||||
"product_images": (),
|
||||
"deal_ids_header": "",
|
||||
"deal_status_str": DEAL_STATUS_STR,
|
||||
}
|
||||
self.deal: Card
|
||||
|
||||
@staticmethod
|
||||
async def _group_deal_products_by_products(deal_products: List[CardProduct]) -> Dict[str, DealTechSpecProductData]:
|
||||
products: Dict[str, DealTechSpecProductData] = {}
|
||||
additional_info: Optional[str]
|
||||
|
||||
for deal_product in deal_products:
|
||||
# Для группировки по артикулу и услугам
|
||||
key = _gen_key_for_product(deal_product)
|
||||
|
||||
if key not in products:
|
||||
products[key] = {
|
||||
"deal": deal_product.card,
|
||||
"deal_products": [deal_product],
|
||||
"quantity": deal_product.quantity,
|
||||
"additional_info": deal_product.product.additional_info,
|
||||
"warehouses_and_quantities": [],
|
||||
}
|
||||
else:
|
||||
products[key]["deal_products"].append(deal_product)
|
||||
products[key]["quantity"] += deal_product.quantity
|
||||
if not products[key]["additional_info"]:
|
||||
products[key]["additional_info"] = deal_product.product.additional_info
|
||||
|
||||
return products
|
||||
|
||||
async def _get_deal_by_id(self, deal_id: int) -> Optional[Card]:
|
||||
deal: Card | None = await self._session.scalar(
|
||||
select(Card)
|
||||
.where(Card.id == deal_id)
|
||||
.options(
|
||||
selectinload(Card.products).selectinload(CardProduct.services),
|
||||
selectinload(Card.products).selectinload(CardProduct.product).selectinload(Product.barcodes),
|
||||
selectinload(Card.services).selectinload(DealServiceModel.service),
|
||||
selectinload(Card.status_history),
|
||||
selectinload(Card.group).selectinload(CardGroup.cards),
|
||||
joinedload(Card.client),
|
||||
joinedload(Card.shipping_warehouse),
|
||||
)
|
||||
)
|
||||
return deal
|
||||
|
||||
def _set_deals_ids_header(self):
|
||||
self.deal_doc["deal_ids_header"] = f"ID: {self.deal.id}"
|
||||
if self.deal.group:
|
||||
self.deal_doc["deal_ids_header"] = "ID: " + ", ".join(str(d.id) for d in self.deal.group.cards)
|
||||
|
||||
async def _create_deal_tech_spec_document_html(self, deal_id: int):
|
||||
deal = await self._get_deal_by_id(deal_id)
|
||||
if not deal:
|
||||
return ""
|
||||
self.deal = deal
|
||||
|
||||
self._set_deals_ids_header()
|
||||
|
||||
if deal.group:
|
||||
deals = await CardGroupService(self._session).get_cards_by_group_id(deal.group.id)
|
||||
for d in deals:
|
||||
self.deal_doc["deals"].append(d)
|
||||
grouped_products = await self._group_deal_products_by_products(d.products)
|
||||
for product in grouped_products.values():
|
||||
key = _regen_key_for_product(product)
|
||||
if key not in self.deal_doc["products"]:
|
||||
self.deal_doc["products"][key] = product
|
||||
else:
|
||||
self.deal_doc["products"][key]["quantity"] += product["quantity"]
|
||||
self.deal_doc["products"][key]["warehouses_and_quantities"].append((
|
||||
product["deal"].shipping_warehouse.name, product["quantity"],
|
||||
))
|
||||
else:
|
||||
self.deal_doc["deals"] = [deal]
|
||||
self.deal_doc["products"] = await self._group_deal_products_by_products(deal.products)
|
||||
|
||||
product_urls: List[Optional[str]] = []
|
||||
for product in self.deal_doc["products"].values():
|
||||
if len(product["deal_products"][0].product.images) > 0:
|
||||
product_urls.append(product["deal_products"][0].product.images[0].image_url)
|
||||
else:
|
||||
product_urls.append(None)
|
||||
self.deal_doc["product_images"] = await fetch_images(product_urls)
|
||||
|
||||
template = ENV.get_template("deal/deal-tech-spec.html")
|
||||
|
||||
result = template.render({"data": self.deal_doc, "sign_place_text": "_" * 22})
|
||||
return result
|
||||
|
||||
async def create_deal_tech_spec_pdf(self, deal_id) -> BytesIO:
|
||||
doc = await self._create_deal_tech_spec_document_html(deal_id)
|
||||
pdf_file = BytesIO()
|
||||
HTML(string=doc).write_pdf(pdf_file, stylesheets=[CSS(APP_PATH + '/static/css/deal-tech-spec.css')])
|
||||
return pdf_file
|
||||
0
generators/price_list_pdf_generator/__init__.py
Normal file
0
generators/price_list_pdf_generator/__init__.py
Normal file
88
generators/price_list_pdf_generator/generator.py
Normal file
88
generators/price_list_pdf_generator/generator.py
Normal file
@@ -0,0 +1,88 @@
|
||||
from collections import defaultdict
|
||||
from io import BytesIO
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import joinedload
|
||||
from weasyprint import HTML, CSS
|
||||
|
||||
from constants import ENV, APP_PATH
|
||||
from enums.service import ServiceType
|
||||
from models import Service, ServiceCategory
|
||||
|
||||
|
||||
class PriceListPdfGenerator:
|
||||
def __init__(self, session: AsyncSession):
|
||||
self._session = session
|
||||
|
||||
async def _get_services_data(self):
|
||||
# Получаем услуги из базы данных, отсортированные по рангу
|
||||
services = (await (
|
||||
self._session.scalars(
|
||||
select(Service)
|
||||
.options(joinedload(Service.category))
|
||||
.filter(Service.is_deleted == False)
|
||||
.order_by(Service.rank)
|
||||
)
|
||||
)).all()
|
||||
|
||||
# Группируем услуги по типу сервиса и категории
|
||||
intermediate_result = defaultdict(lambda: defaultdict(list))
|
||||
|
||||
for service in services:
|
||||
intermediate_result[service.service_type][service.category_id].append(service)
|
||||
|
||||
# Формируем окончательный результат с сортировкой категорий
|
||||
final_result = defaultdict(dict)
|
||||
|
||||
for service_type, categories_dict in intermediate_result.items():
|
||||
# Извлекаем уникальные категории
|
||||
categories = {service.category for services in categories_dict.values() for service in services}
|
||||
|
||||
# Определяем функцию сортировки категорий по рангу
|
||||
def category_sort_key(category):
|
||||
if service_type == ServiceType.DEAL_SERVICE:
|
||||
return category.card_service_rank
|
||||
else:
|
||||
return category.product_service_rank
|
||||
|
||||
# Сортируем категории по определенному рангу
|
||||
sorted_categories = sorted(categories, key=category_sort_key)
|
||||
|
||||
# Строим словарь категорий в отсортированном порядке
|
||||
sorted_categories_dict = {}
|
||||
for category in sorted_categories:
|
||||
sorted_categories_dict[category.id] = categories_dict[category.id]
|
||||
|
||||
final_result[service_type] = sorted_categories_dict
|
||||
|
||||
final_final_result = {}
|
||||
for service_type in [ServiceType.DEAL_SERVICE, ServiceType.PRODUCT_SERVICE]:
|
||||
final_final_result[service_type] = final_result[service_type]
|
||||
return dict(final_final_result)
|
||||
|
||||
async def _create_price_list_html(self):
|
||||
categories = await self._session.scalars(select(ServiceCategory))
|
||||
categories_dict = {category.id: category.name for category in categories}
|
||||
|
||||
services_data = await self._get_services_data()
|
||||
|
||||
service_type_dict = {
|
||||
ServiceType.DEAL_SERVICE: "Общие услуги",
|
||||
ServiceType.PRODUCT_SERVICE: "Услуги фулфилмента",
|
||||
}
|
||||
|
||||
template = ENV.get_template("price-list.html")
|
||||
|
||||
result = template.render({
|
||||
"services_data": services_data,
|
||||
"categories_dict": categories_dict,
|
||||
"service_type_dict": service_type_dict,
|
||||
})
|
||||
return result
|
||||
|
||||
async def create_price_list_pdf(self) -> BytesIO:
|
||||
doc = await self._create_price_list_html()
|
||||
pdf_file = BytesIO()
|
||||
HTML(string=doc).write_pdf(pdf_file, stylesheets=[CSS(APP_PATH + '/static/css/price-list.css')])
|
||||
return pdf_file
|
||||
1
generators/residual_qr_code_generator/__init__.py
Normal file
1
generators/residual_qr_code_generator/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .generator import ResidualQRCodeGenerator
|
||||
166
generators/residual_qr_code_generator/generator.py
Normal file
166
generators/residual_qr_code_generator/generator.py
Normal file
@@ -0,0 +1,166 @@
|
||||
from io import BytesIO
|
||||
from typing import Optional
|
||||
|
||||
from reportlab.lib.units import mm
|
||||
from reportlab.pdfgen.canvas import Canvas
|
||||
from reportlab.platypus import Paragraph, SimpleDocTemplate, PageBreak, Frame
|
||||
from reportlab_qrcode import QRCodeImage
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload, joinedload
|
||||
|
||||
from barcodes.pdf.pdf_maker import PdfMaker
|
||||
from generators.base_pdf_card_generator.base_pdf_card_generator import BasePdfCardGenerator
|
||||
from models import Client, ResidualPallet, ResidualBox
|
||||
|
||||
|
||||
class ResidualQRCodeGenerator(BasePdfCardGenerator):
|
||||
async def _get_client_by_id(self, client_id: int) -> Optional[Client]:
|
||||
stmt = (
|
||||
select(Client)
|
||||
.where(Client.id == client_id)
|
||||
.options(
|
||||
selectinload(Client.boxes),
|
||||
selectinload(Client.pallets)
|
||||
.selectinload(ResidualPallet.boxes),
|
||||
)
|
||||
)
|
||||
client = (await self._session.execute(stmt)).one_or_none()
|
||||
return client[0] if client else None
|
||||
|
||||
@staticmethod
|
||||
def _split_string(string: str) -> list[int]:
|
||||
if not string:
|
||||
return []
|
||||
return [int(item) for item in string.split(",")]
|
||||
|
||||
async def generate(self, pallet_ids_str: str, box_ids_str: str):
|
||||
pallet_ids = self._split_string(pallet_ids_str)
|
||||
box_ids = self._split_string(box_ids_str)
|
||||
|
||||
pallets_buffer = await self.generate_pallets(pallet_ids)
|
||||
boxes_buffer = await self.generate_boxes(box_ids)
|
||||
return self._merge_pdfs([pallets_buffer, boxes_buffer])
|
||||
|
||||
async def _get_pallets(self, pallet_ids: list[int]) -> list[ResidualPallet]:
|
||||
stmt = (
|
||||
select(ResidualPallet)
|
||||
.options(
|
||||
joinedload(ResidualPallet.client),
|
||||
)
|
||||
.where(ResidualPallet.id.in_(pallet_ids))
|
||||
.order_by(ResidualPallet.id.asc())
|
||||
)
|
||||
pallets = await self._session.execute(stmt)
|
||||
return list(pallets.unique().scalars().all())
|
||||
|
||||
def _generate_empty_doc(self) -> BytesIO:
|
||||
buffer = BytesIO()
|
||||
doc: SimpleDocTemplate = self._create_doc(buffer)
|
||||
doc.build([])
|
||||
buffer.seek(0)
|
||||
return buffer
|
||||
|
||||
async def generate_pallets(self, pallet_ids: list[int]) -> BytesIO:
|
||||
if not pallet_ids:
|
||||
return self._generate_empty_doc()
|
||||
|
||||
buffer = BytesIO()
|
||||
doc: SimpleDocTemplate = self._create_doc(buffer)
|
||||
|
||||
pallet_idx = 0
|
||||
pallets = await self._get_pallets(pallet_ids)
|
||||
client = pallets[0].client
|
||||
|
||||
def on_page(canvas: Canvas, _):
|
||||
nonlocal pallet_idx, pallets
|
||||
pallet_id = pallets[pallet_idx].id
|
||||
|
||||
qr = QRCodeImage(f"P{pallet_id}", size=30 * mm)
|
||||
qr.drawOn(canvas, 0, 30)
|
||||
|
||||
object_name = Paragraph(f"Паллет", self.small_centered_style)
|
||||
pallet_id = Paragraph(f"ID: П{pallet_id}", self.small_centered_style)
|
||||
|
||||
frame = Frame(x1=28 * mm, y1=3 * mm, width=30 * mm, height=30 * mm)
|
||||
frame.addFromList([object_name, pallet_id], canvas)
|
||||
|
||||
client_name = Paragraph(f"Клиент: {client.name}", self.small_centered_style)
|
||||
frame = Frame(x1=0 * mm, y1=-7 * mm, width=58 * mm, height=20 * mm)
|
||||
frame.addFromList([client_name], canvas)
|
||||
|
||||
pallet_idx += 1
|
||||
|
||||
elements = []
|
||||
for _ in range(len(pallets)):
|
||||
elements.append(Paragraph("", self.medium_style))
|
||||
elements.append(PageBreak())
|
||||
|
||||
doc.build(elements, on_page, on_page)
|
||||
|
||||
buffer.seek(0)
|
||||
return buffer
|
||||
|
||||
async def _get_boxes(self, box_ids: list[int]) -> list[ResidualBox]:
|
||||
stmt = (
|
||||
select(ResidualBox)
|
||||
.options(
|
||||
joinedload(ResidualBox.client),
|
||||
selectinload(ResidualBox.pallet)
|
||||
.joinedload(ResidualPallet.client),
|
||||
)
|
||||
.where(ResidualBox.id.in_(box_ids))
|
||||
.order_by(ResidualBox.id.asc())
|
||||
)
|
||||
boxes = await self._session.execute(stmt)
|
||||
return list(boxes.unique().scalars().all())
|
||||
|
||||
async def generate_boxes(self, box_ids: list[int]) -> BytesIO:
|
||||
if not box_ids:
|
||||
return self._generate_empty_doc()
|
||||
|
||||
buffer = BytesIO()
|
||||
doc: SimpleDocTemplate = self._create_doc(buffer)
|
||||
|
||||
box_idx = 0
|
||||
boxes = await self._get_boxes(box_ids)
|
||||
client = boxes[0].client or boxes[0].pallet.client
|
||||
|
||||
def on_page(canvas: Canvas, _):
|
||||
nonlocal box_idx
|
||||
box_id = boxes[box_idx].id
|
||||
|
||||
qr = QRCodeImage(f"K{box_id}", size=30 * mm)
|
||||
qr.drawOn(canvas, 0, 30)
|
||||
|
||||
box_info = [
|
||||
Paragraph("Короб", self.small_centered_style),
|
||||
Paragraph(f"ID: К{box_id}", self.small_centered_style),
|
||||
]
|
||||
if boxes[box_idx].pallet_id:
|
||||
box_info.append(Paragraph("На паллете", self.small_centered_style))
|
||||
box_info.append(Paragraph(f"ID: П{boxes[box_idx].pallet_id}", self.small_centered_style))
|
||||
|
||||
frame = Frame(x1=28 * mm, y1=8 * mm, width=30 * mm, height=30 * mm)
|
||||
frame.addFromList(box_info, canvas)
|
||||
|
||||
client_name = Paragraph(f"Клиент: {client.name}", self.small_centered_style)
|
||||
frame = Frame(x1=0 * mm, y1=-7 * mm, width=58 * mm, height=20 * mm)
|
||||
frame.addFromList([client_name], canvas)
|
||||
|
||||
box_idx += 1
|
||||
|
||||
elements = []
|
||||
for _ in range(len(boxes)):
|
||||
elements.append(Paragraph("", self.medium_style))
|
||||
elements.append(PageBreak())
|
||||
|
||||
doc.build(elements, on_page, on_page)
|
||||
|
||||
buffer.seek(0)
|
||||
return buffer
|
||||
|
||||
def _merge_pdfs(self, buffers: list[BytesIO]) -> BytesIO:
|
||||
pdf_maker = PdfMaker((self.page_width, self.page_height))
|
||||
for buffer in buffers:
|
||||
pdf_maker.add_pdfs(buffer)
|
||||
return pdf_maker.get_bytes()
|
||||
1
generators/services_excel_generator/__init__.py
Normal file
1
generators/services_excel_generator/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .core import ServiceExcelExporter
|
||||
183
generators/services_excel_generator/core.py
Normal file
183
generators/services_excel_generator/core.py
Normal file
@@ -0,0 +1,183 @@
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from io import BytesIO
|
||||
from pathlib import Path
|
||||
|
||||
import openpyxl
|
||||
import openpyxl.styles
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from constants import APP_PATH
|
||||
from enums.service import ServiceType
|
||||
from models import Service, ServiceCategory
|
||||
|
||||
|
||||
class ServiceExcelExporter:
|
||||
SERVICE_TYPE_FONT = openpyxl.styles.Font(bold=True, size=14, name='Calibri')
|
||||
SERVICE_TYPE_FILL = openpyxl.styles.PatternFill(start_color='0000FF', end_color='0000FF', fill_type='solid')
|
||||
SERVICE_TYPE_ALIGNMENT = openpyxl.styles.Alignment(horizontal='center')
|
||||
SERVICE_TYPE_BORDER = openpyxl.styles.Border(
|
||||
left=openpyxl.styles.Side(style='medium'),
|
||||
right=openpyxl.styles.Side(style='medium'),
|
||||
top=openpyxl.styles.Side(style='medium'),
|
||||
bottom=openpyxl.styles.Side(style='medium')
|
||||
)
|
||||
|
||||
CATEGORY_FONT = openpyxl.styles.Font(bold=True, size=12, name='Calibri')
|
||||
CATEGORY_FILL = openpyxl.styles.PatternFill(start_color='DBEEF4', end_color='DBEEF4', fill_type='solid')
|
||||
CATEGORY_ALIGNMENT = openpyxl.styles.Alignment(horizontal='center')
|
||||
CATEGORY_BORDER = SERVICE_TYPE_BORDER # Same as service type border
|
||||
|
||||
EVEN_ROW_FILL = openpyxl.styles.PatternFill(start_color='EBF1DE', end_color='EBF1DE', fill_type='solid')
|
||||
|
||||
def __init__(self, session):
|
||||
self.session = session
|
||||
self.template_path = Path(APP_PATH) / 'static' / 'excel' / 'services.xlsx'
|
||||
self.workbook = None
|
||||
self.worksheet = None
|
||||
self.start_row = 12
|
||||
self.name_column = 'A'
|
||||
self.price_column = 'B'
|
||||
self.categories_dict = {}
|
||||
self.service_type_dict = {
|
||||
ServiceType.DEAL_SERVICE: 'Общие услуги',
|
||||
ServiceType.PRODUCT_SERVICE: 'Услуги фулфилмента'
|
||||
}
|
||||
|
||||
async def get_services(self):
|
||||
"""Fetch and organize services from the database."""
|
||||
services = (await self.session.scalars(
|
||||
select(Service)
|
||||
.options(joinedload(Service.category))
|
||||
.filter(Service.is_deleted == False)
|
||||
.order_by(Service.rank)
|
||||
)).all()
|
||||
|
||||
intermediate_result = defaultdict(lambda: defaultdict(list))
|
||||
for service in services:
|
||||
intermediate_result[service.service_type][service.category_id].append(service)
|
||||
|
||||
final_result = defaultdict(dict)
|
||||
for service_type, categories_dict in intermediate_result.items():
|
||||
categories = {service.category for services in categories_dict.values() for service in services}
|
||||
|
||||
def category_sort_key(category):
|
||||
return category.card_service_rank if service_type == ServiceType.DEAL_SERVICE else category.product_service_rank
|
||||
|
||||
sorted_categories = sorted(categories, key=category_sort_key)
|
||||
sorted_categories_dict = {category.id: categories_dict[category.id] for category in sorted_categories}
|
||||
final_result[service_type] = sorted_categories_dict
|
||||
final_final_result = {}
|
||||
for service_type in [ServiceType.DEAL_SERVICE, ServiceType.PRODUCT_SERVICE]:
|
||||
final_final_result[service_type] = final_result[service_type]
|
||||
return dict(final_final_result)
|
||||
|
||||
async def get_categories(self):
|
||||
"""Fetch categories from the database."""
|
||||
categories = (await self.session.scalars(
|
||||
select(ServiceCategory)
|
||||
.filter(ServiceCategory.is_deleted == False)
|
||||
)).all()
|
||||
self.categories_dict = {category.id: category for category in categories}
|
||||
|
||||
@staticmethod
|
||||
def format_ruble_number(number: int):
|
||||
"""Format a number with spaces as thousand separators."""
|
||||
return f'{number:,}'.replace(',', ' ')
|
||||
|
||||
def get_price_value(self, service: Service):
|
||||
"""Get the price value string for a service."""
|
||||
if service.price_ranges:
|
||||
price_ranges_length = len(service.price_ranges)
|
||||
if price_ranges_length == 1:
|
||||
price = self.format_ruble_number(int(service.price_ranges[0].price))
|
||||
return f'{price} ₽'
|
||||
|
||||
result = []
|
||||
for idx, price_range in enumerate(service.price_ranges):
|
||||
price = self.format_ruble_number(int(price_range.price))
|
||||
if idx == price_ranges_length - 1:
|
||||
result.append(f'от {price_range.from_quantity} шт: {price} ₽')
|
||||
else:
|
||||
result.append(f'{price_range.from_quantity} шт - {price_range.to_quantity} шт: {price} ₽')
|
||||
return '\n'.join(result)
|
||||
else:
|
||||
price = self.format_ruble_number(int(service.price))
|
||||
return f'{price} ₽'
|
||||
|
||||
def load_template(self):
|
||||
"""Load the Excel template."""
|
||||
self.workbook = openpyxl.load_workbook(self.template_path)
|
||||
self.worksheet = self.workbook.active
|
||||
|
||||
def format_service_type_cell(self, cell):
|
||||
"""Apply formatting to a service type cell."""
|
||||
cell.font = self.SERVICE_TYPE_FONT
|
||||
cell.fill = self.SERVICE_TYPE_FILL
|
||||
cell.alignment = self.SERVICE_TYPE_ALIGNMENT
|
||||
cell.border = self.SERVICE_TYPE_BORDER
|
||||
|
||||
def format_category_cell(self, cell):
|
||||
"""Apply formatting to a category cell."""
|
||||
cell.font = self.CATEGORY_FONT
|
||||
cell.fill = self.CATEGORY_FILL
|
||||
cell.alignment = self.CATEGORY_ALIGNMENT
|
||||
cell.border = self.CATEGORY_BORDER
|
||||
|
||||
def format_service_row(self, name_cell, price_cell, is_even_row):
|
||||
"""Apply formatting to a service row."""
|
||||
name_cell.alignment = openpyxl.styles.Alignment(wrap_text=True, vertical='center')
|
||||
price_cell.alignment = openpyxl.styles.Alignment(wrap_text=True, horizontal='right', vertical="center")
|
||||
if is_even_row:
|
||||
name_cell.fill = self.EVEN_ROW_FILL
|
||||
price_cell.fill = self.EVEN_ROW_FILL
|
||||
|
||||
def write_service_type_row(self, service_type):
|
||||
"""Write a service type row to the worksheet."""
|
||||
row = self.start_row
|
||||
cell = self.worksheet[f'{self.name_column}{row}']
|
||||
cell.value = self.service_type_dict[service_type]
|
||||
self.worksheet.merge_cells(f'{self.name_column}{row}:{self.price_column}{row}')
|
||||
self.format_service_type_cell(cell)
|
||||
self.start_row += 1
|
||||
|
||||
def write_category_row(self, category_name):
|
||||
"""Write a category row to the worksheet."""
|
||||
row = self.start_row
|
||||
cell = self.worksheet[f'{self.name_column}{row}']
|
||||
cell.value = category_name
|
||||
self.worksheet.merge_cells(f'{self.name_column}{row}:{self.price_column}{row}')
|
||||
self.format_category_cell(cell)
|
||||
self.start_row += 1
|
||||
|
||||
def write_service_row(self, service, counter):
|
||||
"""Write a service row to the worksheet."""
|
||||
row = self.start_row
|
||||
name_cell = self.worksheet[f'{self.name_column}{row}']
|
||||
price_cell = self.worksheet[f'{self.price_column}{row}']
|
||||
name_cell.value = service.name
|
||||
price_cell.value = self.get_price_value(service)
|
||||
self.format_service_row(name_cell, price_cell, counter % 2 == 0)
|
||||
self.start_row += 1
|
||||
|
||||
async def generate_excel(self):
|
||||
"""Generate the Excel file with services and categories."""
|
||||
start = time.time()
|
||||
await self.get_categories()
|
||||
services = await self.get_services()
|
||||
self.load_template()
|
||||
|
||||
for service_type, categories in services.items():
|
||||
self.write_service_type_row(service_type)
|
||||
for category_id, services_list in categories.items():
|
||||
category_name = self.categories_dict[category_id].name
|
||||
self.write_category_row(category_name)
|
||||
for idx, service in enumerate(services_list):
|
||||
self.write_service_row(service, idx)
|
||||
|
||||
result = BytesIO()
|
||||
self.workbook.save(result)
|
||||
result.seek(0)
|
||||
print('Elapsed time:', time.time() - start)
|
||||
return result
|
||||
1
generators/shipping_qr_code_generator/__init__.py
Normal file
1
generators/shipping_qr_code_generator/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .generator import ShippingQRCodeGenerator
|
||||
134
generators/shipping_qr_code_generator/generator.py
Normal file
134
generators/shipping_qr_code_generator/generator.py
Normal file
@@ -0,0 +1,134 @@
|
||||
from io import BytesIO
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import HTTPException
|
||||
from reportlab.lib.units import mm
|
||||
from reportlab.pdfgen.canvas import Canvas
|
||||
from reportlab.platypus import Paragraph, SimpleDocTemplate, Frame, PageBreak
|
||||
from reportlab_qrcode import QRCodeImage
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.orm import joinedload, selectinload
|
||||
|
||||
from constants import DOMAIN_NAME
|
||||
from generators.base_pdf_card_generator.base_pdf_card_generator import BasePdfCardGenerator
|
||||
from models import Card, ShippingWarehouse, Pallet
|
||||
from models.shipping import Box
|
||||
|
||||
|
||||
class ShippingQRCodeGenerator(BasePdfCardGenerator):
    """Generate PDF labels (QR codes and captions) for a deal, its pallets and boxes."""

    async def _get_deal_by_id(self, deal_id: int) -> Optional[Card]:
        """Load a deal with its shipping warehouse and pallets eagerly attached.

        Returns ``None`` when no deal with the given id exists.
        """
        stmt = (
            select(Card)
            .where(Card.id == deal_id)
            .options(
                joinedload(Card.shipping_warehouse),
                selectinload(Card.pallets),
            )
        )
        # scalar_one_or_none() unwraps the single entity directly instead of
        # the previous Row-indexing (`deal[0] if deal else None`) dance.
        return (await self._session.execute(stmt)).scalar_one_or_none()

    async def _require_deal(self, deal_id: int) -> Card:
        """Return the deal or raise HTTP 404 with a user-facing message."""
        deal = await self._get_deal_by_id(deal_id)
        if not deal:
            # Fixed: the message previously contained a stray "a" after the id.
            raise HTTPException(status_code=404, detail=f"Сделка с ID {deal_id} не найдена")
        return deal

    async def _get_warehouse_name(self, deal: Card) -> str:
        """Resolve the shipping warehouse display name ("" when unset)."""
        shipping_warehouse = await self._session.get(ShippingWarehouse, deal.shipping_warehouse_id)
        return shipping_warehouse.name if shipping_warehouse else ""

    async def generate_deal(self, deal_id: int) -> BytesIO:
        """Build a one-page PDF with a QR code linking to the deal page."""
        deal = await self._require_deal(deal_id)

        buffer = BytesIO()
        doc: SimpleDocTemplate = self._create_doc(buffer)

        deal_link = f"{DOMAIN_NAME}/deals/{deal_id}"
        warehouse_name = await self._get_warehouse_name(deal)

        def on_first_page(canvas: Canvas, doc):
            # Everything is drawn directly on the canvas because the QR code
            # and captions are absolutely positioned on the label.
            qr = QRCodeImage(deal_link, size=30 * mm)
            qr.drawOn(canvas, 0, 30)

            deal_id_paragraph = Paragraph(f"ID: {deal_id}", self.small_centered_style)
            deal_name_paragraph = Paragraph(str(deal.name), self.small_centered_style)

            frame = Frame(x1=28 * mm, y1=5 * mm, width=30 * mm, height=30 * mm)
            frame.addFromList([deal_id_paragraph, deal_name_paragraph], canvas)

            warehouse_paragraph = Paragraph(warehouse_name, self.small_centered_style)
            frame = Frame(x1=0 * mm, y1=-7 * mm, width=58 * mm, height=20 * mm)
            frame.addFromList([warehouse_paragraph], canvas)

        # build() needs at least one flowable even though the page is drawn
        # entirely in on_first_page.
        empty_paragraph = Paragraph("", self.small_centered_style)
        doc.build([empty_paragraph], on_first_page)

        buffer.seek(0)
        return buffer

    async def generate_pallets(self, deal_id: int) -> BytesIO:
        """Build a PDF with one page per pallet of the deal."""
        deal = await self._require_deal(deal_id)

        buffer = BytesIO()
        doc: SimpleDocTemplate = self._create_doc(buffer)

        warehouse_name = await self._get_warehouse_name(deal)

        elements = []
        for pallet in deal.pallets:
            elements.append(Paragraph(f"ID: {deal_id}", self.medium_style))
            elements.append(Paragraph(str(deal.name), self.medium_style))
            elements.append(Paragraph(f"Паллет П{pallet.id}", self.medium_style))
            elements.append(Paragraph(warehouse_name, self.medium_style))
            elements.append(PageBreak())

        doc.build(elements)

        buffer.seek(0)
        return buffer

    async def _get_boxes_on_pallets_count(self, deal_id):
        """Return (pallet_id, box_count) rows for a deal.

        Pallets without boxes are included with a count of 0 (outer join).
        """
        stmt_boxes_on_pallets = (
            select(
                Pallet.id,
                func.count(Box.id).label("box_count"),
            )
            .join(Box, isouter=True)
            .where(Pallet.card_id == deal_id)
            .group_by(Pallet.id)
        )
        pallets = (await self._session.execute(stmt_boxes_on_pallets)).all()
        return pallets

    async def generate_boxes(self, deal_id: int) -> BytesIO:
        """Build a PDF with one page per box: loose boxes first, then per-pallet boxes."""
        deal = await self._require_deal(deal_id)

        warehouse_name = await self._get_warehouse_name(deal)

        buffer = BytesIO()
        doc: SimpleDocTemplate = self._create_doc(buffer)

        elements = []

        # NOTE(review): _get_deal_by_id eager-loads only `pallets`; `deal.boxes`
        # and `pallet.boxes` rely on the relationships' own loading strategy —
        # verify they do not trigger async lazy loads here.
        for box in deal.boxes:
            elements.append(Paragraph(f"ID: {deal_id}", self.medium_style))
            elements.append(Paragraph(str(deal.name), self.medium_style))
            elements.append(Paragraph(f"Короб K{box.id}", self.medium_style))
            elements.append(Paragraph(warehouse_name, self.medium_style))
            elements.append(PageBreak())

        for pallet in deal.pallets:
            for box in pallet.boxes:
                elements.append(Paragraph(f"ID: {deal_id}", self.medium_style))
                elements.append(Paragraph(str(deal.name), self.medium_style))
                box_label = f"Паллет П{pallet.id}, Короб K{box.id}"
                elements.append(Paragraph(box_label, self.medium_style))
                elements.append(Paragraph(warehouse_name, self.medium_style))
                elements.append(PageBreak())

        doc.build(elements)

        buffer.seek(0)
        return buffer
|
||||
1
generators/warehouse_place_qr_code_generator/__init__.py
Normal file
1
generators/warehouse_place_qr_code_generator/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .generator import WarehousePlaceQRCodeGenerator
|
||||
84
generators/warehouse_place_qr_code_generator/generator.py
Normal file
84
generators/warehouse_place_qr_code_generator/generator.py
Normal file
@@ -0,0 +1,84 @@
|
||||
import os
|
||||
from io import BytesIO
|
||||
|
||||
from reportlab.graphics import renderPDF
|
||||
from reportlab.lib.units import mm
|
||||
from reportlab.pdfgen.canvas import Canvas
|
||||
from reportlab.platypus import Paragraph, SimpleDocTemplate, Frame
|
||||
from reportlab_qrcode import QRCodeImage
|
||||
from svglib.svglib import svg2rlg
|
||||
|
||||
from constants import STATIC_PATH
|
||||
from generators.base_pdf_card_generator.base_pdf_card_generator import BasePdfCardGenerator
|
||||
from services.warehouse_management import WmsService
|
||||
|
||||
|
||||
class WarehousePlaceQRCodeGenerator(BasePdfCardGenerator):
    """Generate printable QR-code labels for warehouse storage places."""

    @staticmethod
    def _scaled_length(value: str | int) -> float:
        """Effective display length of a place code: dots count as 1/3 of a char."""
        text = str(value)
        dots = text.count(".")
        return len(text) - dots + dots / 3

    @classmethod
    def _font_size_for(cls, value: str | int, thresholds, default: int) -> int:
        """Pick a font size from ``thresholds`` — ordered (min_length, size)
        pairs, largest threshold first — falling back to ``default``.
        """
        length = cls._scaled_length(value)
        for min_length, size in thresholds:
            if length > min_length:
                return size
        return default

    @staticmethod
    def _get_big_font_size_from_str(value: str | int) -> int:
        """Font size for the large (full-label) place number."""
        # Same thresholds as before the shared-helper refactor: >8 → 30, >5 → 37, else 48.
        return WarehousePlaceQRCodeGenerator._font_size_for(value, ((8, 30), (5, 37)), 48)

    @staticmethod
    def _get_mid_font_size_from_str(value: str | int) -> int:
        """Font size for the compact (short-label) place number."""
        # >8 → 22, >6 → 26, >4 → 30, else 34.
        return WarehousePlaceQRCodeGenerator._font_size_for(value, ((8, 22), (6, 26), (4, 30)), 34)

    async def generate(self, place_id: int, is_short: bool) -> BytesIO:
        """Build a one-page PDF label: place QR code, place code text, company logo.

        ``is_short`` selects the compact layout (smaller QR, mid font sizes)
        over the full layout (bigger QR, big font sizes). The resulting PDF
        is rotated before being returned.
        """
        icon_path = os.path.join(STATIC_PATH, "icons", "denco.svg")

        service = WmsService(self._session)
        place_code = await service.get_code_of_place(place_id)

        buffer = BytesIO()
        doc: SimpleDocTemplate = self._create_doc(buffer)

        def on_first_page(canvas: Canvas, doc):
            # Company logo, scaled down from the raw SVG size.
            svg_icon_scale = 0.04
            drawing = svg2rlg(icon_path)
            drawing.scale(svg_icon_scale, svg_icon_scale)

            if is_short:
                qr = QRCodeImage(place_code, size=17 * mm)
                qr.drawOn(canvas, 42 * mm, -4)

                font_size = self._get_mid_font_size_from_str(place_code)
                number_paragraph = Paragraph(str(place_code), self._get_paragraph_style(font_size))
                number_frame = Frame(x1=0 * mm, y1=-15 * mm, width=45 * mm, height=30 * mm)
                renderPDF.draw(drawing, canvas, 3, 35)
            else:
                qr = QRCodeImage(place_code, size=20 * mm)
                qr.drawOn(canvas, 39 * mm, -2 * mm)

                font_size = self._get_big_font_size_from_str(place_code)
                number_paragraph = Paragraph(str(place_code), self._get_paragraph_style(font_size))
                number_frame = Frame(x1=0 * mm, y1=9 * mm, width=60 * mm, height=30 * mm)
                renderPDF.draw(drawing, canvas, 67, 3)

            number_frame.addFromList([number_paragraph], canvas)

        # build() needs at least one flowable even though everything is drawn
        # in on_first_page.
        empty_paragraph = Paragraph("", self.small_centered_style)
        elements = [empty_paragraph]
        doc.build(elements, on_first_page)

        buffer.seek(0)
        rotated = self._rotate_pdf(buffer)
        return rotated
|
||||
1
generators/work_shifts_qr_code_generator/__init__.py
Normal file
1
generators/work_shifts_qr_code_generator/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .generator import WorkShiftsQRCodeGenerator
|
||||
32
generators/work_shifts_qr_code_generator/generator.py
Normal file
32
generators/work_shifts_qr_code_generator/generator.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from io import BytesIO
|
||||
|
||||
from reportlab.lib.units import mm
|
||||
from reportlab.pdfgen.canvas import Canvas
|
||||
from reportlab.platypus import Paragraph
|
||||
from reportlab_qrcode import QRCodeImage
|
||||
|
||||
from generators.base_pdf_card_generator.base_pdf_card_generator import BasePdfCardGenerator
|
||||
from models import User
|
||||
from services.user import UserService
|
||||
|
||||
|
||||
class WorkShiftsQRCodeGenerator(BasePdfCardGenerator):
    """Generate a personal work-shift badge: user-id QR code plus name and position."""

    async def generate(self, user_id: int) -> BytesIO:
        """Build a one-page PDF with the user's id encoded as a QR code.

        NOTE(review): an unknown ``user_id`` is not handled here — confirm
        that UserService.get_by_id raises rather than returning None.
        """
        buffer = BytesIO()
        doc = self._create_doc(buffer)

        def on_first_page(canvas: Canvas, doc):
            qr = QRCodeImage(str(user_id), size=33 * mm)
            qr.drawOn(canvas, 34, 0)

        user: User = await UserService(self._session).get_by_id(user_id)
        position = user.position.name if user.position else ""
        # Fixed: ReportLab Paragraphs collapse "\n" as whitespace; an explicit
        # <br/> tag is required to break the line between name and position.
        user_info = Paragraph(
            f"{user.first_name} {user.second_name}<br/>{position}",
            self.small_centered_style
        )

        doc.build([user_info], on_first_page)

        buffer.seek(0)
        return buffer
|
||||
1
logger/__init__.py
Normal file
1
logger/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from logger.builder import logger_builder as logger_builder
|
||||
60
logger/builder.py
Normal file
60
logger/builder.py
Normal file
@@ -0,0 +1,60 @@
|
||||
import logging
|
||||
import logging.config
|
||||
|
||||
from logger.constants import (
|
||||
LEVEL_NAME,
|
||||
BACKUP_COUNT,
|
||||
LOGS_FOLDER,
|
||||
MAX_LOG_FILE_SIZE_BYTES,
|
||||
)
|
||||
from logger.formatter import JsonFormatter
|
||||
from logger.gunzip_rotating_file_handler import GunZipRotatingFileHandler
|
||||
from logger.filters import LevelFilter, RequestIdFilter
|
||||
from utils.singleton import Singleton
|
||||
|
||||
|
||||
class LoggerBuilder(metaclass=Singleton):
    """Singleton factory that assembles the application's "crm" logger.

    One gzip-rotating file handler is attached per standard log level (each
    filtered to exactly that level) plus a JSON console handler.
    """

    def get_logger(self) -> logging.Logger:
        """Return the fully configured "crm" logger, rebuilding its handlers."""
        logger = logging.getLogger("crm")
        logger.setLevel(logging.DEBUG)
        # Drop handlers from any previous build so they don't accumulate.
        logger.handlers.clear()
        self.set_handlers(logger)
        return logger

    def set_handlers(self, logger: logging.Logger):
        """Attach one file handler per standard level and one console handler."""
        # Standard logging levels are spaced 10 apart (DEBUG=10 .. CRITICAL=50).
        LOGGER_LEVEL_STEP = 10
        for level in range(logging.DEBUG, logging.CRITICAL + 1, LOGGER_LEVEL_STEP):
            logger.addHandler(self.create_rotating_file_handler(level))
        logger.addHandler(self.create_console_handler())

    @classmethod
    def create_rotating_file_handler(cls, level: int) -> GunZipRotatingFileHandler:
        """Build a gzip-rotating JSON file handler writing logs/<level>/<level>.log."""
        level_name = LEVEL_NAME[level]
        folder = LOGS_FOLDER / level_name
        folder.mkdir(parents=True, exist_ok=True)

        file_handler = GunZipRotatingFileHandler(
            folder / (level_name + ".log"),
            maxBytes=MAX_LOG_FILE_SIZE_BYTES,
            encoding="UTF-8",
            backupCount=BACKUP_COUNT[level],
        )

        # Each file receives records of exactly one level, tagged with a
        # request id and serialized as JSON.
        file_handler.addFilter(LevelFilter(level))
        file_handler.addFilter(RequestIdFilter())
        file_handler.setFormatter(JsonFormatter())

        return file_handler

    @classmethod
    def create_console_handler(cls) -> logging.StreamHandler:
        """Build an INFO-and-above JSON handler for the console stream."""
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(JsonFormatter())
        return console_handler


logger_builder = LoggerBuilder()
|
||||
25
logger/constants.py
Normal file
25
logger/constants.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import logging
from pathlib import Path


from constants import APP_PATH


# Human-readable folder/file name per log level.
# NOTE: logging.FATAL is the same integer as logging.CRITICAL (50), so the
# previous duplicate FATAL dict entries were dead keys silently overwritten
# by the CRITICAL entries; they have been removed. Values are unchanged.
LEVEL_NAME = {
    logging.CRITICAL: "critical",
    logging.ERROR: "error",
    logging.WARNING: "warning",
    logging.INFO: "info",
    logging.DEBUG: "debug",
}

# Number of rotated (gzipped) backups to keep per level — more for severe levels.
BACKUP_COUNT = {
    logging.CRITICAL: 5,
    logging.ERROR: 4,
    logging.WARNING: 3,
    logging.INFO: 2,
    logging.DEBUG: 1,
}

MAX_LOG_FILE_SIZE_BYTES = 30 * 1024 * 1024  # 30 Mb
LOGS_FOLDER = Path(APP_PATH) / Path("logs")
|
||||
17
logger/filters.py
Normal file
17
logger/filters.py
Normal file
@@ -0,0 +1,17 @@
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
|
||||
class LevelFilter(logging.Filter):
    """Pass through only records whose level equals the configured one."""

    def __init__(self, level):
        super().__init__()
        self.level = level  # the only levelno this filter lets through

    def filter(self, record):
        """Exact levelno match — unlike handler levels, there is no >= semantics."""
        matches = (self.level == record.levelno)
        return matches
|
||||
|
||||
|
||||
class RequestIdFilter(logging.Filter):
    """Stamp every record with a freshly generated UUID4 as ``request_id``.

    NOTE(review): a new id is generated per record, so it does not correlate
    records belonging to the same request — confirm this is intended.
    """

    def filter(self, record):
        record.request_id = f"{uuid.uuid4()}"
        return True
|
||||
20
logger/formatter.py
Normal file
20
logger/formatter.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, UTC
|
||||
|
||||
|
||||
class JsonFormatter(logging.Formatter):
|
||||
def format(self, record: any):
|
||||
log_record = {
|
||||
"timestamp": datetime.now(UTC).isoformat(),
|
||||
"level": record.levelname,
|
||||
"module": record.module,
|
||||
"line": record.lineno,
|
||||
"message": record.getMessage(),
|
||||
"request_id": record.request_id,
|
||||
}
|
||||
|
||||
if record.exc_info:
|
||||
log_record["exception"] = self.formatException(record.exc_info)
|
||||
|
||||
return json.dumps(log_record, ensure_ascii=False)
|
||||
35
logger/gunzip_rotating_file_handler.py
Normal file
35
logger/gunzip_rotating_file_handler.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import gzip
|
||||
from logging.handlers import RotatingFileHandler
|
||||
import shutil
|
||||
import os
|
||||
|
||||
|
||||
class GunZipRotatingFileHandler(RotatingFileHandler):
    """RotatingFileHandler variant that gzip-compresses rotated log files.

    Backups are kept as <base>.1.gz (newest) … <base>.<backupCount>.gz
    (oldest) instead of the plain-text backups of the parent class.
    """

    def doRollover(self):
        """Close the stream, shift existing .gz backups, gzip the live file.

        Mirrors RotatingFileHandler.doRollover, but the rotated file is
        compressed; the oldest backup falls off the end of the numbered chain.
        """
        # Release the current log file before renaming/compressing it.
        if self.stream:
            self.stream.close()
            self.stream = None

        if self.backupCount > 0:
            # Rotate existing backup files
            # (shift i.gz -> (i+1).gz from oldest to newest so .1.gz frees up).
            for i in range(self.backupCount - 1, 0, -1):
                sfn = self.rotation_filename("%s.%d.gz" % (self.baseFilename, i))
                dfn = self.rotation_filename("%s.%d.gz" % (self.baseFilename, i + 1))
                if os.path.exists(sfn):
                    if os.path.exists(dfn):
                        os.remove(dfn)
                    os.rename(sfn, dfn)

            # Compress current log file to .1.gz
            dfn = self.rotation_filename(self.baseFilename + ".1.gz")
            if os.path.exists(dfn):
                os.remove(dfn)

            if os.path.exists(self.baseFilename):
                with open(self.baseFilename, "rb") as f_in:
                    with gzip.open(dfn, "wb") as f_out:
                        shutil.copyfileobj(f_in, f_out)
                os.remove(self.baseFilename)

        # Reopen the live log file unless opening is deferred (delay=True).
        if not self.delay:
            self.stream = self._open()
|
||||
74
main.py
74
main.py
@@ -1,36 +1,53 @@
|
||||
import asyncio
|
||||
import platform
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from aiokafka.errors import KafkaConnectionError
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
import platform
|
||||
|
||||
from starlette.staticfiles import StaticFiles
|
||||
|
||||
import routers
|
||||
|
||||
origins = [
|
||||
'http://localhost:5173'
|
||||
]
|
||||
app = FastAPI(separate_input_output_schemas=False)
|
||||
|
||||
if platform.system() == 'Linux':
|
||||
import uvicorn.workers
|
||||
from constants import API_ROOT
|
||||
from external.kafka import consume_messages
|
||||
from external.kafka.producer import init_producer, get_producer
|
||||
|
||||
|
||||
class Worker(uvicorn.workers.UvicornWorker):
|
||||
CONFIG_KWARGS = {
|
||||
'root_path': '/api'
|
||||
}
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=origins,
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan: start Kafka producer/consumer on startup, stop on shutdown."""
    try:
        await init_producer()
        producer = await get_producer()
        if producer:
            await producer.start()
    except KafkaConnectionError as e:
        # Kafka being unreachable must not prevent the API from starting.
        print(e)

    # Background task consuming Kafka messages for the app's whole lifetime.
    consumer_task = asyncio.create_task(consume_messages())

    yield
    # Shutdown: stop the producer (if one was created) and cancel the consumer.
    producer = await get_producer()
    if producer:
        await producer.stop()
    consumer_task.cancel()
|
||||
|
||||
|
||||
app = FastAPI(lifespan=lifespan, separate_input_output_schemas=False, root_path='/api')
|
||||
|
||||
# app.add_middleware(
|
||||
# CORSMiddleware,
|
||||
# allow_origins=origins,
|
||||
# allow_credentials=True,
|
||||
# allow_methods=["*"],
|
||||
# allow_headers=["*"],
|
||||
# )
|
||||
|
||||
routers_list = [
|
||||
routers.attribute_router,
|
||||
routers.auth_router,
|
||||
routers.deal_router,
|
||||
routers.card_router,
|
||||
routers.card_group_router,
|
||||
routers.client_router,
|
||||
routers.service_router,
|
||||
routers.product_router,
|
||||
@@ -44,6 +61,19 @@ routers_list = [
|
||||
routers.time_tracking_router,
|
||||
routers.billing_router,
|
||||
routers.task_router,
|
||||
routers.statistics_router,
|
||||
routers.work_shifts_router,
|
||||
routers.work_shifts_planning_router,
|
||||
routers.transaction_router,
|
||||
routers.shipping_router,
|
||||
routers.department_router,
|
||||
routers.residues_router,
|
||||
routers.project_router,
|
||||
routers.board_router,
|
||||
routers.status_router,
|
||||
routers.card_tag_router,
|
||||
routers.chat_router,
|
||||
routers.wms_router,
|
||||
]
|
||||
for router in routers_list:
|
||||
app.include_router(router)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from audioop import ratecv
|
||||
from typing import Union
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
@@ -36,7 +36,7 @@ class OzonProductSynchronizer(BaseProductSynchronizer):
|
||||
if primary_image:
|
||||
image = ProductImage(
|
||||
product=product,
|
||||
image_url=primary_image
|
||||
image_url=primary_image[0]
|
||||
)
|
||||
return [image]
|
||||
product_images = []
|
||||
@@ -84,7 +84,7 @@ class OzonProductSynchronizer(BaseProductSynchronizer):
|
||||
products_info = await self.api.get_products_info(data)
|
||||
if not products_info:
|
||||
continue
|
||||
result = products_info.get('result')
|
||||
result = products_info
|
||||
if not result:
|
||||
continue
|
||||
items = result.get('items')
|
||||
@@ -131,7 +131,7 @@ class OzonProductSynchronizer(BaseProductSynchronizer):
|
||||
products_info = await self.api.get_products_info(data)
|
||||
if not products_info:
|
||||
continue
|
||||
result = products_info.get('result')
|
||||
result = products_info
|
||||
if not result:
|
||||
continue
|
||||
items = result.get('items')
|
||||
@@ -169,6 +169,9 @@ class OzonProductSynchronizer(BaseProductSynchronizer):
|
||||
def _update_images(self, product, product_info):
|
||||
existing_images = {image.image_url for image in product.images}
|
||||
primary_image = product_info.get('primary_image')
|
||||
if isinstance(primary_image,list) and primary_image:
|
||||
primary_image = primary_image[0]
|
||||
|
||||
if primary_image and primary_image not in existing_images:
|
||||
image = ProductImage(
|
||||
product=product,
|
||||
|
||||
@@ -100,9 +100,10 @@ class WildberriesProductSynchronizer(BaseProductSynchronizer):
|
||||
new_images.append(image)
|
||||
return new_images
|
||||
|
||||
async def _process_product(self, card, size_value, nm_uuid):
|
||||
async def _process_product(self, card, size_value, nm_uuid, size):
|
||||
product = await self._create_product(card, size_value)
|
||||
barcodes = await self._create_barcodes(product, card.get('sizes')[0].get('skus') or [])
|
||||
|
||||
barcodes = await self._create_barcodes(product, size.get('skus') or [])
|
||||
images = await self._create_images(product, card.get('photos') or [])
|
||||
wildberries_product = await self._create_wildberries_product(product, nm_uuid)
|
||||
|
||||
@@ -126,7 +127,8 @@ class WildberriesProductSynchronizer(BaseProductSynchronizer):
|
||||
await self._process_product(
|
||||
card,
|
||||
size_value,
|
||||
nm_uuid
|
||||
nm_uuid,
|
||||
size
|
||||
)
|
||||
await self._write()
|
||||
|
||||
@@ -176,7 +178,8 @@ class WildberriesProductSynchronizer(BaseProductSynchronizer):
|
||||
await self._process_product(
|
||||
card,
|
||||
size_value,
|
||||
nm_uuid
|
||||
nm_uuid,
|
||||
size
|
||||
)
|
||||
await self._write()
|
||||
|
||||
|
||||
@@ -1,6 +1,187 @@
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from external.marketplace import YandexMarketplaceApi
|
||||
from external.marketplace.base.product_synchronizer import BaseProductSynchronizer
|
||||
from marketplaces.base.core import BaseMarketplaceController
|
||||
from models import Product, YandexProduct, ProductBarcode, ProductImage
|
||||
|
||||
|
||||
class YandexProductSynchronizer(BaseProductSynchronizer):
|
||||
api: YandexMarketplaceApi
|
||||
|
||||
def _try_get_param(self, offer: dict, param: str) -> Optional[str]:
|
||||
params = offer.get('params')
|
||||
if not params:
|
||||
return None
|
||||
for p in params:
|
||||
if p['name'] == param:
|
||||
return str(p['value'])
|
||||
return None
|
||||
|
||||
def _create_product(self, offer: dict) -> Product:
    """Map a Yandex offer payload onto a new (unpersisted) Product row."""
    # Optional attributes are pulled from the offer's named parameter list.
    optional_fields = {
        'brand': 'Бренд',
        'size': 'Размер',
        'color': 'Цвет товара',
        'composition': 'Состав материала',
    }
    extras = {
        field: self._try_get_param(offer, param_name)
        for field, param_name in optional_fields.items()
    }
    return Product(
        client_id=self.marketplace.client_id,
        name=offer['name'],
        article=offer['offerId'],
        **extras,
    )
|
||||
|
||||
def _create_barcodes(self, product: Product, offer: dict):
    """Build a ProductBarcode row for every barcode listed on the offer."""
    return [
        ProductBarcode(product=product, barcode=sku)
        for sku in offer['barcodes']
    ]
|
||||
|
||||
def _create_images(self, product: Product, offer: dict):
    """Build ProductImage rows from the offer's pictures.

    Only the primary (first) picture is stored, matching the ``[:1]`` slice.
    """
    pictures = offer.get('pictures', [])
    return [
        ProductImage(product=product, image_url=url)
        for url in pictures[:1]
    ]
|
||||
|
||||
def _create_ym_product(self, product: Product):
    """Link a Product to this marketplace via a new YandexProduct row."""
    link = YandexProduct(
        product=product,
        marketplace_id=self.marketplace.id,
    )
    return link
|
||||
|
||||
async def create_products(self):
    """Import Yandex offers that are not yet stored locally.

    Streams all offers from the API, skips those whose article is already
    synchronized, accumulates new Product / barcode / image / YandexProduct
    rows, and performs a single write at the end. A failure on one offer is
    logged and the import continues.
    """
    self._clear()

    synchronized_articles = await self._get_synchronized_products()
    async for product in self.api.get_all_products():
        try:
            offer = product.get('offer')
            if not offer:
                continue
            if offer['offerId'] in synchronized_articles:
                continue
            product = self._create_product(offer)
            self.products.append(product)
            barcodes = self._create_barcodes(product, offer)
            product.barcodes.extend(barcodes)
            images = self._create_images(product, offer)
            product.images.extend(images)
            ym_product = self._create_ym_product(product)
            self.marketplace_products.append(ym_product)
        except Exception as e:
            # Consistent with synchronize_products: log and keep importing
            # the remaining offers instead of aborting the whole run.
            print(f'Error: {e}')
    await self._write()
|
||||
|
||||
def _update_barcodes(self, product: Product, offer: dict):
    """Return ProductBarcode rows for offer barcodes the product lacks."""
    known = {existing.barcode for existing in product.barcodes}
    additions = []
    for sku in offer['barcodes']:
        if sku in known:
            continue
        additions.append(ProductBarcode(product=product, barcode=sku))
    return additions
|
||||
|
||||
def _update_images(self, product: Product, offer: dict):
    """Return ProductImage rows for new pictures (primary picture only)."""
    known_urls = {existing.image_url for existing in product.images}
    pictures = offer.get('pictures', [])
    return [
        ProductImage(product=product, image_url=url)
        for url in pictures[:1]
        if url not in known_urls
    ]
|
||||
|
||||
async def _update_product(self, product: Product, offer: dict):
    """Refresh a synchronized Product's fields from a fresh offer payload.

    Scalar fields are overwritten; barcodes and images are only appended
    (existing rows are never removed here).
    """
    product.name = offer['name']
    param_fields = (
        ('brand', 'Бренд'),
        ('size', 'Размер'),
        ('color', 'Цвет товара'),
        ('composition', 'Состав материала'),
    )
    for attribute, param_name in param_fields:
        setattr(product, attribute, self._try_get_param(offer, param_name))

    product.barcodes.extend(self._update_barcodes(product, offer))
    product.images.extend(self._update_images(product, offer))
|
||||
|
||||
async def synchronize_products(self):
    """Refresh locally stored products from their current Yandex offers.

    Loads every Product already linked to this marketplace (with barcodes
    and images eagerly attached), fetches the matching offers by article,
    applies updates per offer, and writes everything once at the end.
    Failures on individual offers are logged and skipped.
    """
    self._clear()
    synchronized_products = (
        select(
            Product
        )
        .options(
            selectinload(Product.barcodes),
            selectinload(Product.images),
        )
        .select_from(
            YandexProduct
        )
        .join(
            Product
        )
        .where(
            YandexProduct.marketplace_id == self.marketplace.id
        )
    )
    result = await self.session.execute(synchronized_products)
    synchronized_products = result.scalars().all()
    synchronized_products_dict = {product.article: product for product in synchronized_products}
    # Keep a set for O(1) membership checks (was a list, O(n) per offer);
    # the API call receives a list snapshot of the same articles.
    synchronized_articles = set(synchronized_products_dict.keys())
    async for product in self.api.get_products_by_offer_ids(list(synchronized_articles)):
        try:
            offer = product.get('offer')
            if not offer:
                continue
            article = offer['offerId']
            if article not in synchronized_articles:
                continue
            product = synchronized_products_dict[article]
            await self._update_product(product, offer)
        except Exception as e:
            # Log and continue so one bad offer doesn't abort the whole sync.
            print(f'Error: {e}')
            continue
    await self._write()
|
||||
|
||||
async def _get_synchronized_products(self):
    """Return the set of articles of products already linked to Yandex."""
    stmt = (
        select(Product.article)
        .select_from(YandexProduct)
        .join(Product)
    )
    articles = (await self.session.execute(stmt)).scalars().all()
    return set(articles)
|
||||
|
||||
|
||||
class YandexController(BaseMarketplaceController):
|
||||
|
||||
def __init__(self, session, marketplace):
|
||||
super().__init__(session, marketplace)
|
||||
self.synchronizer = YandexProductSynchronizer(session, marketplace, self.api)
|
||||
|
||||
async def synchronize_products(self):
|
||||
await self.synchronizer.synchronize_products()
|
||||
|
||||
async def create_products(self):
|
||||
pass
|
||||
await self.synchronizer.create_products()
|
||||
|
||||
3
migrate_wms.sh
Executable file
3
migrate_wms.sh
Executable file
@@ -0,0 +1,3 @@
|
||||
# Autogenerate a new WMS migration revision from model changes,
# then apply all pending WMS migrations to the database.
alembic -c alembic.wms.ini revision --autogenerate
alembic -c alembic.wms.ini upgrade head
|
||||
|
||||
@@ -30,7 +30,6 @@ target_metadata = BaseModel.metadata
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
def include_object(object, name, type_, reflected, compare_to):
|
||||
print(f"{type_}: {name}")
|
||||
return True # Temporarily return True to debug all objects
|
||||
|
||||
|
||||
|
||||
1
migrations_wms/README
Normal file
1
migrations_wms/README
Normal file
@@ -0,0 +1 @@
|
||||
Generic single-database configuration with an async dbapi.
|
||||
102
migrations_wms/env.py
Normal file
102
migrations_wms/env.py
Normal file
@@ -0,0 +1,102 @@
|
||||
import asyncio
|
||||
import backend.config as settings
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy.engine import Connection
|
||||
|
||||
from alembic import context
|
||||
|
||||
from backend.wms_session import engine
|
||||
from models_wms import BaseModel
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
target_metadata = BaseModel.metadata
|
||||
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
def include_object(object, name, type_, reflected, compare_to):
    """Alembic autogenerate hook: include every schema object unconditionally.

    Temporarily returns True for all objects to debug autogeneration;
    narrow this down once the schema comparison is trusted.
    """
    include_everything = True
    return include_everything
|
||||
|
||||
|
||||
def get_url():
    """Build the database URL by filling the ini template with settings values."""
    url = config.get_main_option("sqlalchemy.url").format(
        PG_LOGIN=settings.PG_LOGIN,
        PG_PASSWORD=settings.PG_PASSWORD,
        PG_HOST=settings.PG_HOST,
        PG_DATABASE=settings.PG_DATABASE,
    )
    return url


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = get_url()
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Configure the migration context on a live connection and run migrations."""
    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      include_schemas=True,
                      include_object=include_object,
                      )
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # Reuses the application's async engine rather than building one here.
    connectable = engine

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""

    asyncio.run(run_async_migrations())


# Entry point: alembic selects offline vs online mode via the CLI invocation.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
28
migrations_wms/script.py.mako
Normal file
28
migrations_wms/script.py.mako
Normal file
@@ -0,0 +1,28 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -1,7 +1,14 @@
|
||||
from sqlalchemy.orm import configure_mappers
|
||||
|
||||
from .project import *
|
||||
from .module import *
|
||||
from .board import *
|
||||
from .status import *
|
||||
from .attribute import *
|
||||
from .card import *
|
||||
from .card_tag import *
|
||||
from .auth import *
|
||||
from .deal import *
|
||||
from .card import *
|
||||
from .client import *
|
||||
from .service import *
|
||||
from .product import *
|
||||
@@ -12,5 +19,10 @@ from .marketplace import *
|
||||
from .payroll import *
|
||||
from .billing import *
|
||||
from .marketplace_products import *
|
||||
from .card_group import *
|
||||
from .transaction import *
|
||||
from .residues import *
|
||||
from .shipping import *
|
||||
from .chat import *
|
||||
|
||||
configure_mappers()
|
||||
|
||||
113
models/attribute.py
Normal file
113
models/attribute.py
Normal file
@@ -0,0 +1,113 @@
|
||||
import pickle
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from sqlalchemy import ForeignKey, Table, Column, UniqueConstraint, Index
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from models.base import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Project, BaseModel, Card
|
||||
|
||||
# Many-to-many link between projects and the attributes enabled for them,
# deduplicated by uq_project_attribute.
project_attribute = Table(
    'project_attribute',
    BaseModel.metadata,
    Column('project_id', ForeignKey('projects.id')),
    Column('attribute_id', ForeignKey('attributes.id')),
    UniqueConstraint('project_id', 'attribute_id', name='uq_project_attribute'),
)
|
||||
|
||||
|
||||
class AttributeType(BaseModel):
    """Catalogue of attribute value types referenced by Attribute.type_id."""

    __tablename__ = 'attribute_types'

    id: Mapped[int] = mapped_column(primary_key=True)
    # Both the machine key and the display name are unique.
    type: Mapped[str] = mapped_column(nullable=False, unique=True)
    name: Mapped[str] = mapped_column(nullable=False, unique=True)
    is_deleted: Mapped[bool] = mapped_column(default=False)

    # One type serves many attributes (Attribute.type is many-to-one), so
    # this back_populates side must be a collection.  The original scalar
    # annotation Mapped['Attribute'] made SQLAlchemy derive uselist=False
    # for a one-to-many, which breaks once a type has more than one
    # attribute row.
    attributes: Mapped[list['Attribute']] = relationship(
        'Attribute',
        back_populates='type',
        lazy='noload',
    )
|
||||
|
||||
|
||||
class Attribute(BaseModel):
    """Definition of a configurable card attribute; per-card values live in
    CardAttribute."""

    __tablename__ = 'attributes'

    id: Mapped[int] = mapped_column(primary_key=True)
    # Human-facing label and machine name (name is indexed for lookups).
    label: Mapped[str] = mapped_column(nullable=False)
    name: Mapped[str] = mapped_column(nullable=False, index=True)
    is_applicable_to_group: Mapped[bool] = mapped_column(
        default=False,
        comment='Применять ли изменения атрибута карточки ко всем карточкам в группе',
    )
    is_shown_on_dashboard: Mapped[bool] = mapped_column(
        default=False,
        server_default='0',
        comment='Отображается ли атрибут на дашборде',
    )
    is_highlight_if_expired: Mapped[bool] = mapped_column(
        default=False,
        server_default='0',
        comment='Подсветка атрибута, если Дата/ДатаВремя просрочена',
    )
    is_nullable: Mapped[bool] = mapped_column(default=False, nullable=False)
    # Raw default value bytes — presumably pickled like CardAttribute.value;
    # TODO confirm against the code that writes it.
    default_value: Mapped[bytes] = mapped_column(nullable=True)
    is_deleted: Mapped[bool] = mapped_column(default=False)
    description: Mapped[str] = mapped_column(default="", nullable=False)

    # Projects in which this attribute is enabled (via project_attribute).
    projects: Mapped[list['Project']] = relationship(
        'Project',
        uselist=True,
        secondary='project_attribute',
        back_populates='attributes',
        lazy='noload',
    )

    type_id: Mapped[int] = mapped_column(ForeignKey('attribute_types.id'), nullable=False)
    type: Mapped[AttributeType] = relationship(
        'AttributeType',
        back_populates='attributes',
        lazy='joined',
    )

    card_attributes: Mapped[list['CardAttribute']] = relationship(
        'CardAttribute',
        uselist=True,
        lazy='noload',
        back_populates='attribute',
    )
|
||||
|
||||
|
||||
class CardAttribute(BaseModel):
    """Per-card value of an Attribute, stored as a pickled blob."""

    __tablename__ = 'card_attributes'

    id: Mapped[int] = mapped_column(primary_key=True)
    # Pickled Python value; NULL when no value has been set yet.
    value: Mapped[bytes] = mapped_column(nullable=True)

    card_id: Mapped[int] = mapped_column(ForeignKey('cards.id'), nullable=False)
    card: Mapped['Card'] = relationship(
        'Card',
        back_populates='attributes',
        lazy='noload',
    )

    attribute_id: Mapped[int] = mapped_column(ForeignKey('attributes.id'), nullable=False)
    attribute: Mapped[Attribute] = relationship(
        'Attribute',
        back_populates='card_attributes',
        lazy='joined',
    )

    # One value row per (card, attribute) pair.
    __table_args__ = (
        UniqueConstraint('card_id', 'attribute_id', name='uq_card_id_attribute_id'),
        Index('idx_card_id_attribute_id', 'card_id', 'attribute_id', unique=True)
    )

    def set_value(self, value) -> None:
        """Serialize ``value`` with pickle and store the bytes in ``self.value``."""
        self.value = pickle.dumps(value)

    def get_value(self):
        """Deserialize and return the stored value.

        Returns None when nothing has been stored yet (the column is
        nullable); the original implementation raised TypeError on NULL.
        """
        # NOTE(review): pickle.loads on database-stored bytes executes
        # arbitrary code if the DB is ever written by an untrusted party.
        if self.value is None:
            return None
        return pickle.loads(self.value)
|
||||
123
models/auth.py
123
models/auth.py
@@ -1,13 +1,16 @@
|
||||
from typing import TYPE_CHECKING
|
||||
import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import BigInteger, Table, ForeignKey, Column
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from enums.user import UserRole
|
||||
from models.base import BaseModel
|
||||
from models.work_shifts import WorkShift
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models.payroll import PayRate, PaymentRecord
|
||||
from models import Card, CardEmployees, Message
|
||||
|
||||
role_permissions = Table(
|
||||
'role_permissions',
|
||||
@@ -51,22 +54,36 @@ class Role(BaseModel):
|
||||
# users: Mapped[list["User"]] = relationship("User", back_populates="users")
|
||||
|
||||
|
||||
class UserDepartmentSection(BaseModel):
    """Association row linking a user to a department section."""

    __tablename__ = 'user_department_section'

    section_id: Mapped[int] = mapped_column(ForeignKey('department_sections.id'), primary_key=True)
    section: Mapped["DepartmentSection"] = relationship(lazy='selectin', back_populates='users')

    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'), primary_key=True)
    user: Mapped["User"] = relationship(lazy="selectin", back_populates='department_sections')

    # True when this user heads the section.
    is_chief: Mapped[bool] = mapped_column(nullable=False, default=False, server_default='0')
|
||||
|
||||
|
||||
class User(BaseModel):
|
||||
__tablename__ = 'users'
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
|
||||
first_name: Mapped[str] = mapped_column(nullable=False, server_default='')
|
||||
second_name: Mapped[str] = mapped_column(nullable=False, server_default='')
|
||||
patronymic: Mapped[str] = mapped_column(nullable=False, server_default='')
|
||||
comment: Mapped[str] = mapped_column(nullable=False, server_default='')
|
||||
telegram_id: Mapped[int] = mapped_column(BigInteger,
|
||||
nullable=False,
|
||||
index=True)
|
||||
phone_number: Mapped[str] = mapped_column(nullable=True)
|
||||
passport_data: Mapped[str] = mapped_column(nullable=True, comment='Серия и номер паспорта')
|
||||
is_admin: Mapped[bool] = mapped_column(nullable=False, default=False)
|
||||
is_blocked: Mapped[bool] = mapped_column(nullable=False, server_default='0')
|
||||
is_deleted: Mapped[bool] = mapped_column(nullable=False, server_default='0')
|
||||
|
||||
role_key: Mapped[int] = mapped_column(ForeignKey('roles.key'), server_default=UserRole.user)
|
||||
role_key: Mapped[str] = mapped_column(ForeignKey('roles.key'), server_default=UserRole.user)
|
||||
role: Mapped["Role"] = relationship(
|
||||
'Role',
|
||||
lazy='joined'
|
||||
@@ -91,6 +108,54 @@ class User(BaseModel):
|
||||
uselist=True,
|
||||
foreign_keys="PaymentRecord.user_id"
|
||||
)
|
||||
work_shifts: Mapped[list["WorkShift"]] = relationship(
|
||||
"WorkShift",
|
||||
back_populates="user",
|
||||
uselist=True,
|
||||
foreign_keys="WorkShift.user_id"
|
||||
)
|
||||
|
||||
managed_cards: Mapped[list["Card"]] = relationship(
|
||||
back_populates="manager",
|
||||
uselist=True,
|
||||
)
|
||||
|
||||
passport_images = relationship(
|
||||
'PassportImage',
|
||||
back_populates='user',
|
||||
lazy='selectin',
|
||||
cascade="all, delete-orphan"
|
||||
)
|
||||
|
||||
cards: Mapped[list['CardEmployees']] = relationship(
|
||||
back_populates='user',
|
||||
lazy='selectin'
|
||||
)
|
||||
|
||||
department_sections: Mapped[list[UserDepartmentSection]] = relationship(
|
||||
"UserDepartmentSection",
|
||||
back_populates='user',
|
||||
lazy="noload",
|
||||
)
|
||||
|
||||
messages: Mapped[list['Message']] = relationship(
|
||||
'Message',
|
||||
back_populates='crm_sender',
|
||||
lazy='noload',
|
||||
)
|
||||
|
||||
|
||||
class InviteCode(BaseModel):
    """Registration code created by one user and activated by another."""

    __tablename__ = 'invite_codes'
    code: Mapped[str] = mapped_column(primary_key=True)

    # user presumably points at the account that activated the code —
    # TODO confirm against the activation flow.
    is_activated: Mapped[bool] = mapped_column(nullable=False, default=False)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    user: Mapped["User"] = relationship('User', foreign_keys=[user_id])

    created_at: Mapped[datetime.datetime] = mapped_column(nullable=False)
    created_by_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    created_by: Mapped["User"] = relationship('User', foreign_keys=[created_by_id])
|
||||
|
||||
|
||||
class Position(BaseModel):
|
||||
@@ -104,3 +169,57 @@ class Position(BaseModel):
|
||||
uselist=False,
|
||||
back_populates='position'
|
||||
)
|
||||
|
||||
|
||||
class PassportImage(BaseModel):
    """Stored link to a scan of a user's passport."""

    __tablename__ = 'passport_images'
    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'), nullable=False)
    user: Mapped["User"] = relationship(back_populates='passport_images')

    image_url: Mapped[str] = mapped_column(nullable=False)
|
||||
|
||||
|
||||
class Department(BaseModel):
    """Top-level organisational unit made up of sections."""

    __tablename__ = 'departments'
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(unique=True)

    # Deleting a department cascades to its sections.
    sections: Mapped[list['DepartmentSection']] = relationship(
        back_populates='department',
        lazy='selectin',
        cascade='all, delete',
    )
|
||||
|
||||
|
||||
class DepartmentSection(BaseModel):
    """Section of a department; sections can nest into a tree via
    parent_department_section."""

    __tablename__ = 'department_sections'
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(index=True)

    # Owning department; nullable, so a section may exist unattached.
    department_id: Mapped[Optional[int]] = mapped_column(ForeignKey('departments.id'))
    department: Mapped["Department"] = relationship(
        back_populates='sections',
        lazy='selectin',
    )

    # Self-referential hierarchy: the parent side needs remote_side=[id].
    parent_department_section_id: Mapped[Optional[int]] = mapped_column(ForeignKey('department_sections.id'))
    parent_department_section: Mapped["DepartmentSection"] = relationship(
        "DepartmentSection",
        back_populates="sections",
        lazy='selectin',
        remote_side=[id],
    )
    # Child sections; deleted together with this section.
    sections: Mapped[list["DepartmentSection"]] = relationship(
        "DepartmentSection",
        back_populates="parent_department_section",
        uselist=True,
        cascade='all, delete',
    )

    # Membership rows; removed when the section is removed.
    users: Mapped[list[UserDepartmentSection]] = relationship(
        "UserDepartmentSection",
        lazy='selectin',
        back_populates='section',
        cascade='all, delete',
    )
|
||||
|
||||
@@ -35,14 +35,17 @@ class BarcodeTemplate(BaseModel):
|
||||
__tablename__ = 'barcode_templates'
|
||||
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
|
||||
name = Column(String, nullable=False, index=True, comment='Название шаблона')
|
||||
attributes = relationship('BarcodeTemplateAttribute',
|
||||
secondary=barcode_template_attribute_link,
|
||||
lazy='selectin'
|
||||
)
|
||||
additional_attributes = relationship('BarcodeTemplateAdditionalField',
|
||||
lazy='selectin',
|
||||
back_populates='barcode_template',
|
||||
cascade="all, delete")
|
||||
attributes = relationship(
|
||||
'BarcodeTemplateAttribute',
|
||||
secondary=barcode_template_attribute_link,
|
||||
lazy='selectin',
|
||||
)
|
||||
additional_attributes = relationship(
|
||||
'BarcodeTemplateAdditionalField',
|
||||
lazy='selectin',
|
||||
back_populates='barcode_template',
|
||||
cascade="all, delete",
|
||||
)
|
||||
additional_field = Column(String, nullable=True, comment='Дополнительное поле')
|
||||
|
||||
is_default = Column(Boolean, nullable=False, default=False, comment='По умолчанию')
|
||||
|
||||
@@ -3,7 +3,10 @@ from sqlalchemy.orm import declarative_base, DeclarativeBase
|
||||
|
||||
|
||||
class BaseModel(DeclarativeBase, AsyncAttrs):
|
||||
pass
|
||||
def __repr__(self):
|
||||
if hasattr(self, 'id'):
|
||||
return f'<{self.__class__.__name__} id={self.id}>'
|
||||
return super().__repr__()
|
||||
|
||||
|
||||
metadata = BaseModel.metadata
|
||||
|
||||
@@ -5,18 +5,39 @@ from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from models import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Deal
|
||||
from models import Card, CardGroup
|
||||
|
||||
|
||||
class DealBillRequest(BaseModel):
|
||||
__tablename__ = 'deal_bill_requests'
|
||||
class CardBillRequest(BaseModel):
|
||||
__tablename__ = 'card_bill_requests'
|
||||
|
||||
deal_id: Mapped[int] = mapped_column(ForeignKey('deals.id'),
|
||||
nullable=False,
|
||||
primary_key=True,
|
||||
unique=True)
|
||||
deal: Mapped['Deal'] = relationship(back_populates='bill_request')
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
|
||||
card_id: Mapped[int] = mapped_column(
|
||||
ForeignKey('cards.id'),
|
||||
nullable=False,
|
||||
)
|
||||
card: Mapped['Card'] = relationship(back_populates='bill_requests')
|
||||
|
||||
created_at: Mapped[datetime.datetime] = mapped_column(nullable=False)
|
||||
paid: Mapped[bool] = mapped_column(nullable=False, default=False)
|
||||
|
||||
pdf_url: Mapped[str] = mapped_column(nullable=True)
|
||||
invoice_number: Mapped[str] = mapped_column(nullable=True)
|
||||
|
||||
|
||||
class GroupBillRequest(BaseModel):
    """Invoice request raised for a whole card group."""

    __tablename__ = 'group_bill_requests'

    id: Mapped[int] = mapped_column(primary_key=True)

    group_id: Mapped[int] = mapped_column(
        ForeignKey('card_groups.id'),
        nullable=False,
    )
    group: Mapped['CardGroup'] = relationship(back_populates='bill_requests')

    created_at: Mapped[datetime.datetime] = mapped_column(nullable=False)
    # Flipped once the bill has been paid.
    paid: Mapped[bool] = mapped_column(nullable=False, default=False)
|
||||
|
||||
31
models/board.py
Normal file
31
models/board.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from models.base import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Project, CardStatus, Card
|
||||
|
||||
|
||||
class Board(BaseModel):
    """Board belonging to a project; owns card statuses and holds cards."""

    __tablename__ = "boards"

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(nullable=False)
    created_at: Mapped[datetime] = mapped_column(nullable=False)
    is_deleted: Mapped[bool] = mapped_column(default=False)
    # Position of the board among its project's boards.
    ordinal_number: Mapped[int] = mapped_column(nullable=False)

    project_id: Mapped[int] = mapped_column(ForeignKey('projects.id'), nullable=False)
    project: Mapped["Project"] = relationship(
        "Project",
        back_populates="boards",
        lazy="selectin",
    )

    # Statuses are deleted together with their board.
    statuses: Mapped[list["CardStatus"]] = relationship("CardStatus", back_populates="board", lazy="selectin", cascade="all,delete")

    cards: Mapped[list["Card"]] = relationship("Card", uselist=True, back_populates="board", lazy="selectin")
|
||||
135
models/card.py
Normal file
135
models/card.py
Normal file
@@ -0,0 +1,135 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship, backref, Mapped, mapped_column
|
||||
|
||||
from models.base import BaseModel
|
||||
from .marketplace import BaseMarketplace
|
||||
from .shipping import Pallet, Box
|
||||
from .shipping_warehouse import ShippingWarehouse
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import (
|
||||
CardBillRequest, User, BaseModel, Board, CardStatus, CardGroup, CardAttribute, Client, CardTag,
|
||||
CardService as CardServiceModel, CardProduct, Chat,
|
||||
)
|
||||
|
||||
|
||||
class Card(BaseModel):
    """Central work item of the system; feature modules hang their extra
    columns and relationships off this model (see the region markers)."""

    __tablename__ = 'cards'

    # region Base card attributes
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(nullable=False, comment='Название карточки')
    comment: Mapped[str] = mapped_column(nullable=False, server_default='', comment='Комментарий')
    created_at: Mapped[datetime] = mapped_column(nullable=False, comment='Дата создания')

    is_deleted: Mapped[bool] = mapped_column(nullable=False, server_default='0', default=False, comment='Удалена')
    is_completed: Mapped[bool] = mapped_column(nullable=False, server_default='0', default=False, comment='Завершена')

    # Ordering key for manual card ordering; indexed for sorted reads.
    lexorank: Mapped[str] = mapped_column(nullable=False, comment='Lexorank', index=True)

    board_id: Mapped[int] = mapped_column(ForeignKey('boards.id'), nullable=True, server_default='1')
    board: Mapped['Board'] = relationship(
        'Board',
        back_populates='cards',
    )

    current_status_id: Mapped[int] = mapped_column(
        ForeignKey('card_statuses.id'),
        nullable=False,
        comment='Текущий статус',
    )
    status: Mapped['CardStatus'] = relationship(lazy='selectin')

    # Trail of status changes; removed together with the card.
    status_history = relationship('CardStatusHistory', back_populates='card', cascade="all, delete-orphan")

    attributes: Mapped[list['CardAttribute']] = relationship(
        'CardAttribute',
        uselist=True,
        back_populates='card',
        lazy='selectin',
    )

    # A card sits in at most one group (card_relations has a unique card_id).
    group: Mapped[Optional["CardGroup"]] = relationship(
        'CardGroup',
        secondary='card_relations',
        lazy='joined',
        back_populates='cards'
    )

    tags: Mapped[list['CardTag']] = relationship(
        'CardTag',
        secondary='cards_card_tags',
        back_populates='cards',
        lazy='selectin',
    )
    # endregion

    # region Attributes handled by modules

    # module servicesAndProducts
    is_locked: Mapped[bool] = mapped_column(default=False, server_default='0')
    is_services_profit_accounted: Mapped[bool] = mapped_column(default=True, server_default='1')

    shipping_warehouse_id: Mapped[int] = mapped_column(ForeignKey('shipping_warehouses.id'), nullable=True)
    shipping_warehouse: Mapped["ShippingWarehouse"] = relationship()

    base_marketplace_key: Mapped[str] = mapped_column(ForeignKey("base_marketplaces.key"), nullable=True)
    base_marketplace: Mapped["BaseMarketplace"] = relationship(lazy="joined")

    services: Mapped[list['CardServiceModel']] = relationship(
        'CardService',
        back_populates='card',
        cascade="all, delete-orphan",
        order_by="desc(CardService.service_id)"
    )

    products: Mapped[list['CardProduct']] = relationship(
        'CardProduct',
        back_populates='card',
        cascade="all, delete-orphan",
        order_by="desc(CardProduct.product_id)"
    )

    bill_requests: Mapped[list['CardBillRequest']] = relationship(
        back_populates='card',
        lazy='selectin',
        uselist=True,
    )

    # module client
    client_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey('clients.id', ondelete='CASCADE'),
        nullable=True,
        comment='ID клиента',
    )
    # backref also deletes this card's rows when the client is deleted.
    client: Mapped['Client'] = relationship('Client', backref=backref('cards', cascade="all, delete-orphan"))

    # module managers
    manager_id: Mapped[int] = mapped_column(ForeignKey('users.id'), nullable=True)
    manager: Mapped[Optional["User"]] = relationship(back_populates='managed_cards', lazy='joined')

    # module shipment
    pallets: Mapped[list[Pallet]] = relationship(back_populates='card', lazy='selectin')
    boxes: Mapped[list[Box]] = relationship(back_populates='card', lazy='selectin')

    # module employees
    employees: Mapped[list['CardEmployees']] = relationship(back_populates='card', lazy='selectin')

    # module chat
    chat: Mapped[Optional['Chat']] = relationship(back_populates='card', lazy='joined')

    # endregion
||||
|
||||
|
||||
class CardEmployees(BaseModel):
    """Association row assigning an employee (User) to a Card."""

    __tablename__ = 'card_employees'
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'), primary_key=True)
    user: Mapped['User'] = relationship('User', back_populates='cards', lazy='selectin')

    card_id: Mapped[int] = mapped_column(ForeignKey('cards.id'), primary_key=True)
    card: Mapped[Card] = relationship('Card', back_populates='employees', lazy='selectin')

    # When the user was attached to the card.
    created_at: Mapped[datetime] = mapped_column()
|
||||
40
models/card_group.py
Normal file
40
models/card_group.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import ForeignKey, Table, Column
|
||||
from sqlalchemy.orm import mapped_column, Mapped, relationship
|
||||
|
||||
from models import BaseModel
|
||||
from models import GroupBillRequest
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Card
|
||||
|
||||
|
||||
class CardGroup(BaseModel):
    """Named group of cards; membership lives in the card_relations table."""

    __tablename__ = 'card_groups'
    id: Mapped[int] = mapped_column(
        primary_key=True
    )
    name: Mapped[Optional[str]] = mapped_column(
        nullable=True
    )
    # Ordering key for manual ordering of groups.
    lexorank: Mapped[str] = mapped_column(
        nullable=False
    )
    cards: Mapped[list["Card"]] = relationship(
        back_populates='group',
        secondary='card_relations'
    )
    bill_requests: Mapped[list['GroupBillRequest']] = relationship(
        back_populates='group',
        lazy='selectin',
        uselist=True,
    )
|
||||
|
||||
|
||||
# Group membership table: each card belongs to at most one group
# (card_id is unique) while a group holds many cards.
card_relations = Table(
    'card_relations',
    BaseModel.metadata,
    Column('card_id', ForeignKey('cards.id'), primary_key=True, unique=True),
    Column('group_id', ForeignKey('card_groups.id'), primary_key=True)
)
|
||||
63
models/card_tag.py
Normal file
63
models/card_tag.py
Normal file
@@ -0,0 +1,63 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from sqlalchemy import ForeignKey, Column, Table, Index
|
||||
from sqlalchemy.orm import mapped_column, Mapped, relationship
|
||||
|
||||
from models import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Project, Card
|
||||
|
||||
# Many-to-many link between cards and their tags.
cards_card_tags = Table(
    'cards_card_tags',
    BaseModel.metadata,
    Column('card_id', ForeignKey('cards.id'), primary_key=True),
    Column('card_tag_id', ForeignKey('card_tags.id'), primary_key=True),
)
|
||||
|
||||
|
||||
class CardTagColor(BaseModel):
    """Palette entry for card tags (foreground + background colour)."""

    __tablename__ = "card_tag_colors"

    id: Mapped[int] = mapped_column(primary_key=True)
    label: Mapped[str] = mapped_column(unique=True)
    color: Mapped[str] = mapped_column(unique=True)
    background_color: Mapped[str] = mapped_column(unique=True)
    is_deleted: Mapped[bool] = mapped_column(default=False, nullable=False)
|
||||
|
||||
|
||||
class CardTag(BaseModel):
    """Project-scoped tag attachable to cards."""

    __tablename__ = 'card_tags'

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(nullable=False)
    is_deleted: Mapped[bool] = mapped_column(default=False, server_default='0')

    project_id: Mapped[int] = mapped_column(
        ForeignKey('projects.id'),
        nullable=False,
    )
    project: Mapped['Project'] = relationship(
        'Project',
        back_populates='tags',
        lazy='noload',
    )

    cards: Mapped[list['Card']] = relationship(
        secondary='cards_card_tags',
        lazy='noload',
        back_populates='tags',
    )

    tag_color_id: Mapped[int] = mapped_column(
        ForeignKey('card_tag_colors.id'),
        nullable=False,
    )
    tag_color: Mapped[CardTagColor] = relationship(
        'CardTagColor',
        lazy='selectin',
    )

    # Supports lookups of tags by name within a project with the deleted flag.
    __table_args__ = (
        Index('idx_card_name_project_id', 'name', 'project_id', 'is_deleted'),
    )
|
||||
174
models/chat.py
Normal file
174
models/chat.py
Normal file
@@ -0,0 +1,174 @@
|
||||
import enum
|
||||
from datetime import datetime
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
from uuid import UUID
|
||||
|
||||
from sqlalchemy import ForeignKey, BigInteger, Enum, Uuid
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from models import BaseModel, User
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Client, Card
|
||||
|
||||
|
||||
class TgUser(BaseModel):
    """Telegram account that appears as a message sender."""

    __tablename__ = 'tg_users'

    id: Mapped[int] = mapped_column(
        BigInteger(),
        primary_key=True,
        comment='Telegram user ID',
    )
    # Profile fields as received from Telegram; all optional.
    username: Mapped[Optional[str]] = mapped_column(nullable=True)
    first_name: Mapped[Optional[str]] = mapped_column(nullable=True)
    last_name: Mapped[Optional[str]] = mapped_column(nullable=True)

    # One sender has many messages (Message.tg_sender is many-to-one), so
    # this side must be annotated as a collection.  The original scalar
    # annotation Mapped['Message'] made SQLAlchemy derive uselist=False
    # for a one-to-many, which breaks once a sender has more than one
    # message row.
    messages: Mapped[list['Message']] = relationship(
        'Message',
        lazy='noload',
        back_populates='tg_sender',
    )
|
||||
|
||||
|
||||
class TgGroup(BaseModel):
    """Telegram group bound to at most one client; its topics back the
    individual Chat rows (see Chat.tg_topic_id)."""

    __tablename__ = 'tg_groups'

    # Surrogate UUID key; the Telegram-side id is stored separately below.
    id: Mapped[UUID] = mapped_column(Uuid, primary_key=True)

    tg_group_id: Mapped[int] = mapped_column(
        BigInteger(),
        nullable=False,
        unique=True,
    )
    tg_invite_link: Mapped[str] = mapped_column(nullable=False)

    # At most one group per client (unique FK).
    client_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey('clients.id'),
        unique=True,
    )
    client: Mapped[Optional['Client']] = relationship(
        'Client',
        lazy='joined',
        back_populates='tg_group',
    )

    chats: Mapped[list['Chat']] = relationship(
        'Chat',
        lazy='noload',
        back_populates='tg_group',
    )
|
||||
|
||||
|
||||
class Chat(BaseModel):
    """Conversation backed by a Telegram group topic; linked to a card
    or to a client (both FKs are nullable and unique)."""

    __tablename__ = 'chats'

    id: Mapped[int] = mapped_column(primary_key=True)

    # Topic (thread) id inside the Telegram group.
    tg_topic_id: Mapped[int] = mapped_column(nullable=False)

    # At most one chat per card.
    card_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey('cards.id'),
        unique=True,
    )
    card: Mapped[Optional['Card']] = relationship(
        'Card',
        lazy='joined',
        back_populates='chat',
    )

    # At most one chat per client.
    client_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey('clients.id'),
        unique=True,
    )
    client: Mapped[Optional['Client']] = relationship(
        'Client',
        lazy='joined',
        back_populates='chat',
    )

    tg_group_id: Mapped[UUID] = mapped_column(
        ForeignKey('tg_groups.id'),
        nullable=False,
    )
    tg_group: Mapped[TgGroup] = relationship(
        'TgGroup',
        lazy='joined',
        back_populates='chats',
    )

    # Ordered newest-first by creation time.
    messages: Mapped[list['Message']] = relationship(
        'Message',
        lazy='selectin',
        back_populates='chat',
        order_by='Message.created_at.desc()',
    )
|
||||
|
||||
|
||||
class MessageFile(BaseModel):
    """File attachment stored for a single message."""

    __tablename__ = 'message_files'

    id: Mapped[int] = mapped_column(primary_key=True)
    file_path: Mapped[str] = mapped_column(nullable=False)
    # File type string — presumably a MIME type; TODO confirm against writer.
    type: Mapped[Optional[str]] = mapped_column(nullable=True)
    file_name: Mapped[str] = mapped_column(nullable=False)
    file_size: Mapped[int] = mapped_column(BigInteger(), nullable=True, comment='Размер файла в байтах')

    message_id: Mapped[int] = mapped_column(ForeignKey('messages.id'))
    message: Mapped['Message'] = relationship(
        'Message',
        lazy='noload',
        back_populates='file',
    )
|
||||
|
||||
|
||||
class MessageStatus(enum.Enum):
    """Delivery state recorded on a Message row."""

    sending = 'SENDING'
    success = 'SUCCESS'
    error = 'ERROR'
|
||||
|
||||
|
||||
class Message(BaseModel):
    """Chat message; carries either a Telegram sender or a CRM sender."""

    __tablename__ = 'messages'

    id: Mapped[int] = mapped_column(primary_key=True)
    # Telegram-side message id, when known.
    tg_message_id: Mapped[Optional[int]] = mapped_column(nullable=True)

    text: Mapped[str] = mapped_column(nullable=False)
    created_at: Mapped[datetime] = mapped_column(nullable=False)
    status: Mapped[MessageStatus] = mapped_column(Enum(MessageStatus), nullable=False)
    is_deleted: Mapped[bool] = mapped_column(default=False, server_default='0', nullable=False)
    is_edited: Mapped[bool] = mapped_column(default=False, server_default='0', nullable=False)

    # NOTE(review): presumably exactly one of tg_sender / crm_sender is set
    # per row; nothing at the schema level enforces it — confirm in callers.
    tg_sender_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey('tg_users.id'),
        nullable=True,
    )
    tg_sender: Mapped[TgUser] = relationship(
        'TgUser',
        lazy='selectin',
        back_populates='messages',
    )

    crm_sender_id: Mapped[Optional[int]] = mapped_column(
        ForeignKey('users.id'),
        nullable=True,
    )
    crm_sender: Mapped[Optional['User']] = relationship(
        'User',
        lazy='selectin',
        back_populates='messages',
    )

    chat_id: Mapped[int] = mapped_column(ForeignKey('chats.id'))
    chat: Mapped[Chat] = relationship(
        'Chat',
        lazy='noload',
        back_populates='messages',
    )

    file: Mapped[Optional[MessageFile]] = relationship(
        'MessageFile',
        back_populates='message',
        lazy='selectin',
    )
|
||||
@@ -1,62 +1,67 @@
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
from datetime import datetime
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship, Mapped, mapped_column
|
||||
|
||||
from models import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import ResidualPallet, ResidualBox, Product, BarcodeTemplate, User, TgGroup, Chat
|
||||
|
||||
|
||||
class Client(BaseModel):
|
||||
__tablename__ = 'clients'
|
||||
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
|
||||
name = Column(String, nullable=False, unique=True, comment='Название клиента')
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
name: Mapped[str] = mapped_column(nullable=False, unique=True, comment='Название клиента')
|
||||
|
||||
# TODO replace with additional model
|
||||
company_name = Column(String,
|
||||
nullable=False,
|
||||
server_default='',
|
||||
comment='Название компании')
|
||||
company_name: Mapped[str] = mapped_column(
|
||||
nullable=False,
|
||||
server_default='',
|
||||
comment='Название компании',
|
||||
)
|
||||
|
||||
created_at = Column(DateTime, nullable=False, comment='Дата создания')
|
||||
created_at: Mapped[datetime] = mapped_column(nullable=False, comment='Дата создания')
|
||||
is_deleted: Mapped[bool] = mapped_column(
|
||||
nullable=False,
|
||||
default=False,
|
||||
server_default='0',
|
||||
)
|
||||
|
||||
products = relationship('Product', back_populates='client')
|
||||
details = relationship('ClientDetails', uselist=False, back_populates='client', cascade='all, delete',
|
||||
lazy='joined')
|
||||
products: Mapped[list['Product']] = relationship('Product', back_populates='client')
|
||||
details: Mapped['ClientDetails'] = relationship(
|
||||
uselist=False,
|
||||
back_populates='client',
|
||||
cascade='all, delete',
|
||||
lazy='joined',
|
||||
)
|
||||
|
||||
barcode_template_id = Column(Integer, ForeignKey('barcode_templates.id'), nullable=True)
|
||||
barcode_template = relationship('BarcodeTemplate', lazy='selectin')
|
||||
# users = relationship('ClientUser', back_populates='client', cascade='all, delete')
|
||||
barcode_template_id: Mapped[int] = mapped_column(ForeignKey('barcode_templates.id'), nullable=True)
|
||||
barcode_template: Mapped['BarcodeTemplate'] = relationship('BarcodeTemplate', lazy='selectin')
|
||||
|
||||
comment: Mapped[Optional[str]] = mapped_column(nullable=True, server_default=None, comment='Комментарий')
|
||||
|
||||
pallets: Mapped[list['ResidualPallet']] = relationship(back_populates='client', lazy='selectin')
|
||||
boxes: Mapped[list['ResidualBox']] = relationship(back_populates='client', lazy='selectin')
|
||||
|
||||
tg_group: Mapped[Optional['TgGroup']] = relationship('TgGroup', back_populates='client', lazy='joined')
|
||||
chat: Mapped[Optional['Chat']] = relationship('Chat', back_populates='client', lazy='joined')
|
||||
|
||||
|
||||
class ClientDetails(BaseModel):
|
||||
__tablename__ = 'client_details'
|
||||
|
||||
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
|
||||
client_id = Column(Integer, ForeignKey('clients.id'), unique=True, nullable=False, comment='ID клиента')
|
||||
client = relationship('Client', back_populates='details', cascade='all, delete', uselist=False)
|
||||
client_id: Mapped[int] = mapped_column(ForeignKey('clients.id'), unique=True, nullable=False, comment='ID клиента')
|
||||
client: Mapped[Client] = relationship('Client', back_populates='details', cascade='all, delete', uselist=False)
|
||||
|
||||
telegram = Column(String)
|
||||
phone_number = Column(String)
|
||||
inn = Column(String)
|
||||
email = Column(String)
|
||||
telegram: Mapped[Optional[str]] = mapped_column()
|
||||
phone_number: Mapped[Optional[str]] = mapped_column()
|
||||
inn: Mapped[Optional[str]] = mapped_column()
|
||||
email: Mapped[Optional[str]] = mapped_column()
|
||||
|
||||
last_modified_at = Column(DateTime, nullable=False)
|
||||
last_modified_at: Mapped[datetime] = mapped_column(nullable=False)
|
||||
|
||||
modified_by_user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
|
||||
modified_by_user = relationship('User')
|
||||
|
||||
|
||||
# class ClientContact(BaseModel):
|
||||
# __tablename__ = 'client_contact'
|
||||
# id: Mapped[int] = mapped_column(primary_key=True)
|
||||
#
|
||||
# client_id: Mapped[int] = mapped_column(ForeignKey('clients.id'))
|
||||
# client: Mapped["Client"] = relationship('Client', back_populates='users')
|
||||
#
|
||||
# first_name: Mapped[str] = mapped_column()
|
||||
# last_name: Mapped[str] = mapped_column()
|
||||
#
|
||||
# telegram: Mapped[str] = mapped_column()
|
||||
# phone_number: Mapped[str] = mapped_column()
|
||||
# email: Mapped[str] = mapped_column()
|
||||
# inn: Mapped[str] = mapped_column()
|
||||
#
|
||||
modified_by_user_id: Mapped[int] = mapped_column(ForeignKey('users.id'), nullable=False)
|
||||
modified_by_user: Mapped['User'] = relationship('User')
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
from enum import IntEnum, unique
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Boolean
|
||||
from sqlalchemy.orm import relationship, backref, Mapped, mapped_column
|
||||
|
||||
from models.base import BaseModel
|
||||
from .marketplace import BaseMarketplace
|
||||
from .shipping_warehouse import ShippingWarehouse
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import DealBillRequest
|
||||
|
||||
|
||||
@unique
|
||||
class DealStatus(IntEnum):
|
||||
CREATED = 0
|
||||
AWAITING_ACCEPTANCE = 1
|
||||
PACKAGING = 2
|
||||
AWAITING_SHIPMENT = 3
|
||||
AWAITING_PAYMENT = 4
|
||||
COMPLETED = 5
|
||||
CANCELLED = 6
|
||||
|
||||
|
||||
class Deal(BaseModel):
|
||||
__tablename__ = 'deals'
|
||||
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
|
||||
name = Column(String, nullable=False, comment='Название сделки')
|
||||
created_at = Column(DateTime, nullable=False, comment='Дата создания')
|
||||
current_status = Column(Integer, nullable=False, comment='Текущий статус')
|
||||
|
||||
client_id = Column(Integer, ForeignKey('clients.id', ondelete='CASCADE'), nullable=False, comment='ID клиента')
|
||||
client = relationship('Client', backref=backref('deals', cascade="all, delete-orphan"))
|
||||
|
||||
status_history = relationship('DealStatusHistory', back_populates='deal', cascade="all, delete-orphan")
|
||||
|
||||
is_deleted = Column(Boolean, nullable=False, server_default='0', default=False, comment='Удалена')
|
||||
is_completed = Column(Boolean, nullable=False, server_default='0', default=False, comment='Завершена')
|
||||
is_locked: Mapped[bool] = mapped_column(default=False, server_default='0')
|
||||
|
||||
shipping_warehouse_id: Mapped[int] = mapped_column(ForeignKey('shipping_warehouses.id'), nullable=True)
|
||||
shipping_warehouse: Mapped["ShippingWarehouse"] = relationship()
|
||||
|
||||
base_marketplace_key: Mapped[str] = mapped_column(ForeignKey("base_marketplaces.key"), nullable=True)
|
||||
base_marketplace: Mapped["BaseMarketplace"] = relationship(lazy="joined")
|
||||
|
||||
services = relationship(
|
||||
'DealService',
|
||||
back_populates='deal',
|
||||
cascade="all, delete-orphan",
|
||||
order_by="desc(DealService.service_id)"
|
||||
)
|
||||
|
||||
products = relationship(
|
||||
'DealProduct',
|
||||
back_populates='deal',
|
||||
cascade="all, delete-orphan",
|
||||
order_by="desc(DealProduct.product_id)"
|
||||
)
|
||||
|
||||
# TODO remake with sequence
|
||||
lexorank = Column(String, nullable=False, comment='Lexorank', index=True)
|
||||
|
||||
comment = Column(String, nullable=False, server_default='', comment='Коментарий к заданию')
|
||||
bill_request: Mapped[Optional['DealBillRequest']] = relationship(back_populates='deal', lazy='joined')
|
||||
|
||||
|
||||
class DealStatusHistory(BaseModel):
|
||||
__tablename__ = 'deals_status_history'
|
||||
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
|
||||
|
||||
deal_id = Column(Integer, ForeignKey('deals.id'), nullable=False, comment='ID сделки')
|
||||
deal = relationship('Deal', back_populates='status_history')
|
||||
|
||||
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
|
||||
user = relationship('User')
|
||||
|
||||
changed_at = Column(DateTime, nullable=False, comment='Дата и время когда произошла смена статуса')
|
||||
from_status = Column(Integer, nullable=False, comment='Предыдущий статус')
|
||||
to_status = Column(Integer, nullable=False, comment='Новый статус')
|
||||
|
||||
next_status_deadline = Column(DateTime,
|
||||
comment='Дедлайн до которого сделку нужно перевести на следующий этап')
|
||||
comment = Column(String, nullable=False, comment='Коментарий', server_default='')
|
||||
@@ -1,4 +1,4 @@
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy import ForeignKey, BigInteger
|
||||
from sqlalchemy.orm import Mapped, relationship, mapped_column
|
||||
|
||||
from models import BaseModel
|
||||
@@ -27,7 +27,8 @@ class OzonProduct(BaseModel):
|
||||
product_id: Mapped[int] = mapped_column(ForeignKey('products.id'), primary_key=True)
|
||||
product: Mapped["Product"] = relationship()
|
||||
|
||||
ozon_product_id: Mapped[int] = mapped_column(nullable=False)
|
||||
ozon_product_id: Mapped[int] = mapped_column(BigInteger, nullable=False)
|
||||
|
||||
|
||||
class YandexProduct(BaseModel):
|
||||
__tablename__ = 'yandex_products'
|
||||
|
||||
61
models/module.py
Normal file
61
models/module.py
Normal file
@@ -0,0 +1,61 @@
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Table, Column, ForeignKey
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from models.base import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Project
|
||||
|
||||
|
||||
project_module = Table(
|
||||
'project_module',
|
||||
BaseModel.metadata,
|
||||
Column('project_id', ForeignKey('projects.id'), primary_key=True),
|
||||
Column('module_id', ForeignKey('modules.id'), primary_key=True),
|
||||
)
|
||||
|
||||
|
||||
module_dependencies = Table(
|
||||
'module_dependencies',
|
||||
BaseModel.metadata,
|
||||
Column('module_id', ForeignKey('modules.id'), primary_key=True),
|
||||
Column('depends_on_id', ForeignKey('modules.id'), primary_key=True),
|
||||
)
|
||||
|
||||
|
||||
class Module(BaseModel):
|
||||
__tablename__ = 'modules'
|
||||
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
key: Mapped[str] = mapped_column(unique=True, nullable=False)
|
||||
label: Mapped[str] = mapped_column(nullable=False)
|
||||
icon_name: Mapped[Optional[str]] = mapped_column(unique=True, nullable=False)
|
||||
is_deleted: Mapped[bool] = mapped_column(default=False)
|
||||
|
||||
depends_on: Mapped[list['Module']] = relationship(
|
||||
'Module',
|
||||
secondary=module_dependencies,
|
||||
primaryjoin='Module.id == module_dependencies.c.module_id',
|
||||
secondaryjoin='Module.id == module_dependencies.c.depends_on_id',
|
||||
back_populates='depended_on_by',
|
||||
lazy='immediate',
|
||||
)
|
||||
|
||||
depended_on_by: Mapped[list['Module']] = relationship(
|
||||
'Module',
|
||||
secondary='module_dependencies',
|
||||
primaryjoin='Module.id == module_dependencies.c.depends_on_id',
|
||||
secondaryjoin='Module.id == module_dependencies.c.module_id',
|
||||
back_populates='depends_on',
|
||||
lazy='noload',
|
||||
)
|
||||
|
||||
projects: Mapped[list['Project']] = relationship(
|
||||
'Project',
|
||||
uselist=True,
|
||||
secondary='project_module',
|
||||
back_populates='modules',
|
||||
lazy='noload',
|
||||
)
|
||||
@@ -47,5 +47,5 @@ class PaymentRecord(BaseModel):
|
||||
payroll_scheme_key: Mapped[int] = mapped_column(ForeignKey("payroll_schemas.key"), nullable=False)
|
||||
payroll_scheme: Mapped["PayrollScheme"] = relationship()
|
||||
|
||||
work_units: Mapped[int] = mapped_column(nullable=False)
|
||||
work_units: Mapped[float] = mapped_column(nullable=False)
|
||||
amount: Mapped[float] = mapped_column(Double, nullable=False)
|
||||
|
||||
@@ -1,63 +1,93 @@
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import Column, Integer, String, ForeignKey, Sequence
|
||||
from sqlalchemy.orm import relationship, Mapped
|
||||
from sqlalchemy.testing.schema import mapped_column
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship, Mapped, mapped_column
|
||||
|
||||
from models import BaseModel
|
||||
from models.base import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Marketplace
|
||||
from models import Client, BarcodeTemplate, WildberriesProduct, OzonProduct
|
||||
|
||||
|
||||
class Product(BaseModel):
|
||||
__tablename__ = 'products'
|
||||
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
|
||||
name = Column(String, nullable=False, index=True)
|
||||
article = Column(String, nullable=False, default='', server_default='', index=True)
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
name: Mapped[str] = mapped_column(nullable=False, index=True)
|
||||
article: Mapped[str] = mapped_column(nullable=False, default='', server_default='', index=True)
|
||||
factory_article: Mapped[str] = mapped_column(nullable=False, default='', server_default='', index=True)
|
||||
|
||||
client_id = Column(Integer, ForeignKey('clients.id'), nullable=False, comment='ID сделки')
|
||||
client = relationship('Client', back_populates='products')
|
||||
barcodes = relationship('ProductBarcode', back_populates='product', cascade="all, delete-orphan")
|
||||
client_id: Mapped[int] = mapped_column(ForeignKey('clients.id'), nullable=False)
|
||||
client: Mapped['Client'] = relationship('Client', back_populates='products')
|
||||
barcodes: Mapped[list['ProductBarcode']] = relationship(
|
||||
'ProductBarcode',
|
||||
back_populates='product',
|
||||
cascade='all, delete-orphan',
|
||||
)
|
||||
|
||||
barcode_template_id = Column(Integer, ForeignKey('barcode_templates.id'), nullable=True)
|
||||
barcode_template = relationship('BarcodeTemplate', lazy='joined')
|
||||
barcode_template_id: Mapped[Optional[int]] = mapped_column(ForeignKey('barcode_templates.id'), nullable=True)
|
||||
barcode_template: Mapped['BarcodeTemplate'] = relationship('BarcodeTemplate', lazy='joined')
|
||||
|
||||
barcode_image: Mapped['ProductBarcodeImage'] = relationship(
|
||||
'ProductBarcodeImage',
|
||||
back_populates='product',
|
||||
lazy='joined',
|
||||
uselist=False,
|
||||
)
|
||||
|
||||
# Attributes
|
||||
# TODO move to another table
|
||||
brand = Column(String, nullable=True, comment='Бренд')
|
||||
color = Column(String, nullable=True, comment='Цвет')
|
||||
composition = Column(String, nullable=True, comment='Состав')
|
||||
size = Column(String, nullable=True, comment='Размер')
|
||||
additional_info = Column(String, nullable=True, comment='Дополнительное поле')
|
||||
images = relationship('ProductImage',
|
||||
back_populates='product',
|
||||
lazy='selectin',
|
||||
cascade="all, delete-orphan")
|
||||
brand: Mapped[Optional[str]] = mapped_column(nullable=True, comment='Бренд')
|
||||
color: Mapped[Optional[str]] = mapped_column(nullable=True, comment='Цвет')
|
||||
composition: Mapped[Optional[str]] = mapped_column(nullable=True, comment='Состав')
|
||||
size: Mapped[Optional[str]] = mapped_column(nullable=True, comment='Размер')
|
||||
additional_info: Mapped[Optional[str]] = mapped_column(nullable=True, comment='Дополнительное поле')
|
||||
images: Mapped[list['ProductImage']] = relationship(
|
||||
'ProductImage',
|
||||
back_populates='product',
|
||||
lazy='selectin',
|
||||
cascade='all, delete-orphan',
|
||||
)
|
||||
|
||||
wildberries_products = relationship('WildberriesProduct',
|
||||
back_populates='product',
|
||||
cascade="all, delete-orphan",
|
||||
uselist=True)
|
||||
wildberries_products: Mapped[list['WildberriesProduct']] = relationship(
|
||||
'WildberriesProduct',
|
||||
back_populates='product',
|
||||
cascade='all, delete-orphan',
|
||||
uselist=True,
|
||||
)
|
||||
|
||||
ozon_products = relationship('OzonProduct',
|
||||
back_populates='product',
|
||||
cascade="all, delete-orphan",
|
||||
uselist=True)
|
||||
ozon_products: Mapped[list['OzonProduct']] = relationship(
|
||||
'OzonProduct',
|
||||
back_populates='product',
|
||||
cascade='all, delete-orphan',
|
||||
uselist=True,
|
||||
)
|
||||
|
||||
|
||||
class ProductImage(BaseModel):
|
||||
__tablename__ = 'product_images'
|
||||
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
|
||||
product_id = Column(Integer, ForeignKey('products.id'), nullable=False)
|
||||
product: Mapped["Product"] = relationship(back_populates='images')
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
product_id: Mapped[int] = mapped_column(ForeignKey('products.id'), nullable=False)
|
||||
product: Mapped['Product'] = relationship(back_populates='images')
|
||||
|
||||
image_url = Column(String, nullable=False)
|
||||
image_url: Mapped[str] = mapped_column(nullable=False)
|
||||
|
||||
|
||||
class ProductBarcode(BaseModel):
|
||||
__tablename__ = 'product_barcodes'
|
||||
product_id = Column(Integer, ForeignKey('products.id'), nullable=False, comment='ID товара', primary_key=True)
|
||||
product: Mapped["Product"] = relationship(back_populates='barcodes')
|
||||
product_id: Mapped[int] = mapped_column(
|
||||
ForeignKey('products.id'),
|
||||
nullable=False,
|
||||
comment='ID товара',
|
||||
primary_key=True,
|
||||
)
|
||||
product: Mapped['Product'] = relationship(back_populates='barcodes')
|
||||
|
||||
barcode = Column(String, nullable=False, index=True, comment='ШК товара', primary_key=True)
|
||||
barcode: Mapped[str] = mapped_column(nullable=False, index=True, comment='ШК товара', primary_key=True)
|
||||
|
||||
|
||||
class ProductBarcodeImage(BaseModel):
|
||||
__tablename__ = 'product_barcode_images'
|
||||
product_id: Mapped[int] = mapped_column(ForeignKey('products.id'), primary_key=True, comment='ID товара')
|
||||
product: Mapped['Product'] = relationship(back_populates='barcode_image')
|
||||
|
||||
filename: Mapped[str] = mapped_column(nullable=False)
|
||||
|
||||
50
models/project.py
Normal file
50
models/project.py
Normal file
@@ -0,0 +1,50 @@
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from sqlalchemy.orm import mapped_column, Mapped, relationship
|
||||
|
||||
from models.base import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from board import Board
|
||||
from attribute import Attribute
|
||||
from module import Module
|
||||
from card_tag import CardTag
|
||||
|
||||
|
||||
class Project(BaseModel):
|
||||
__tablename__ = 'projects'
|
||||
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
name: Mapped[str] = mapped_column(nullable=False)
|
||||
created_at: Mapped[datetime] = mapped_column(nullable=False)
|
||||
is_deleted: Mapped[bool] = mapped_column(default=False)
|
||||
|
||||
boards: Mapped[list['Board']] = relationship(
|
||||
'Board',
|
||||
back_populates='project',
|
||||
lazy='noload',
|
||||
)
|
||||
|
||||
attributes: Mapped[list['Attribute']] = relationship(
|
||||
'Attribute',
|
||||
secondary='project_attribute',
|
||||
back_populates='projects',
|
||||
lazy='selectin',
|
||||
)
|
||||
|
||||
modules: Mapped[list['Module']] = relationship(
|
||||
'Module',
|
||||
secondary='project_module',
|
||||
back_populates='projects',
|
||||
lazy='selectin',
|
||||
order_by='asc(Module.id)',
|
||||
)
|
||||
|
||||
tags: Mapped[list['CardTag']] = relationship(
|
||||
'CardTag',
|
||||
back_populates='project',
|
||||
primaryjoin="and_(Project.id == CardTag.project_id, CardTag.is_deleted == False)",
|
||||
order_by='asc(CardTag.id)',
|
||||
lazy='selectin',
|
||||
)
|
||||
62
models/residues.py
Normal file
62
models/residues.py
Normal file
@@ -0,0 +1,62 @@
|
||||
import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from models import BaseModel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from models import Product, Client
|
||||
|
||||
|
||||
class ResidualPallet(BaseModel):
|
||||
__tablename__ = 'residual_pallets'
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
created_at: Mapped[datetime.datetime] = mapped_column(nullable=False)
|
||||
|
||||
client_id: Mapped[Optional[int]] = mapped_column(ForeignKey('clients.id'))
|
||||
client: Mapped['Client'] = relationship(back_populates='pallets')
|
||||
|
||||
boxes: Mapped[list['ResidualBox']] = relationship(
|
||||
back_populates='pallet',
|
||||
uselist=True,
|
||||
lazy='joined',
|
||||
cascade='all, delete-orphan',
|
||||
)
|
||||
|
||||
residual_products: Mapped[list['ResidualProduct']] = relationship(
|
||||
back_populates='pallet',
|
||||
uselist=True,
|
||||
lazy='joined',
|
||||
cascade='all, delete-orphan',
|
||||
)
|
||||
|
||||
|
||||
class ResidualProduct(BaseModel):
|
||||
__tablename__ = 'residual_products'
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
quantity: Mapped[int] = mapped_column()
|
||||
|
||||
product_id: Mapped[int] = mapped_column(ForeignKey('products.id'))
|
||||
product: Mapped['Product'] = relationship(lazy='joined')
|
||||
|
||||
pallet_id: Mapped[Optional[int]] = mapped_column(ForeignKey('residual_pallets.id'))
|
||||
pallet: Mapped[ResidualPallet] = relationship(lazy='joined', back_populates="residual_products")
|
||||
|
||||
box_id: Mapped[Optional[int]] = mapped_column(ForeignKey('residual_boxes.id'))
|
||||
box: Mapped['ResidualBox'] = relationship(back_populates='residual_products')
|
||||
|
||||
|
||||
class ResidualBox(BaseModel):
|
||||
__tablename__ = 'residual_boxes'
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
created_at: Mapped[datetime.datetime] = mapped_column(nullable=False)
|
||||
|
||||
pallet_id: Mapped[Optional[int]] = mapped_column(ForeignKey('residual_pallets.id'))
|
||||
pallet: Mapped[ResidualPallet] = relationship(back_populates='boxes')
|
||||
|
||||
client_id: Mapped[Optional[int]] = mapped_column(ForeignKey('clients.id'))
|
||||
client: Mapped['Client'] = relationship(back_populates='boxes')
|
||||
|
||||
residual_products: Mapped[list['ResidualProduct']] = relationship(back_populates='box')
|
||||
@@ -1,111 +1,138 @@
|
||||
from sqlalchemy import Table, Column, Integer, ForeignKey, ForeignKeyConstraint, UniqueConstraint
|
||||
from sqlalchemy.orm import relationship
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from sqlalchemy import Table, Column, ForeignKey, ForeignKeyConstraint, UniqueConstraint
|
||||
from sqlalchemy.orm import relationship, mapped_column, Mapped
|
||||
|
||||
from models import Product
|
||||
from models.base import BaseModel
|
||||
|
||||
deal_product_service_employees = Table(
|
||||
'deal_product_service_employees',
|
||||
if TYPE_CHECKING:
|
||||
from models import Card, Service, User
|
||||
|
||||
card_product_service_employees = Table(
|
||||
'card_product_service_employees',
|
||||
BaseModel.metadata,
|
||||
Column('deal_id', primary_key=True),
|
||||
Column('card_id', primary_key=True),
|
||||
Column('service_id', primary_key=True),
|
||||
Column('product_id', primary_key=True),
|
||||
Column('user_id', ForeignKey('users.id'), primary_key=True),
|
||||
ForeignKeyConstraint(
|
||||
['deal_id', 'product_id', 'service_id'],
|
||||
['deal_product_services.deal_id', 'deal_product_services.product_id', 'deal_product_services.service_id']
|
||||
['card_id', 'product_id', 'service_id'],
|
||||
['card_product_services.card_id', 'card_product_services.product_id', 'card_product_services.service_id']
|
||||
)
|
||||
)
|
||||
deal_service_employees = Table(
|
||||
'deal_service_employees',
|
||||
card_service_employees = Table(
|
||||
'card_service_employees',
|
||||
BaseModel.metadata,
|
||||
Column('deal_id', primary_key=True),
|
||||
Column('card_id', primary_key=True),
|
||||
Column('service_id', primary_key=True),
|
||||
Column('user_id', ForeignKey('users.id'), primary_key=True),
|
||||
ForeignKeyConstraint(
|
||||
['deal_id', 'service_id'],
|
||||
['deal_services.deal_id', 'deal_services.service_id']
|
||||
['card_id', 'service_id'],
|
||||
['card_services.card_id', 'card_services.service_id']
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class DealService(BaseModel):
|
||||
__tablename__ = 'deal_services'
|
||||
deal_id = Column(Integer, ForeignKey('deals.id'),
|
||||
nullable=False,
|
||||
comment='ID Сделки',
|
||||
primary_key=True)
|
||||
deal = relationship('Deal', back_populates='services')
|
||||
class CardService(BaseModel):
|
||||
__tablename__ = 'card_services'
|
||||
card_id: Mapped[int] = mapped_column(
|
||||
ForeignKey('cards.id'),
|
||||
comment='ID Сделки',
|
||||
primary_key=True,
|
||||
)
|
||||
card: Mapped['Card'] = relationship('Card', back_populates='services')
|
||||
|
||||
service_id = Column(Integer, ForeignKey('services.id'), nullable=False, comment='ID Услуги', primary_key=True)
|
||||
service = relationship('Service')
|
||||
service_id: Mapped[int] = mapped_column(ForeignKey('services.id'), nullable=False, comment='ID Услуги', primary_key=True)
|
||||
service: Mapped['Service'] = relationship('Service')
|
||||
|
||||
quantity = Column(Integer, nullable=False, comment='Кол-во услуги')
|
||||
price = Column(Integer, nullable=False, server_default='0', comment='Цена услуги')
|
||||
quantity: Mapped[int] = mapped_column(nullable=False, comment='Кол-во услуги')
|
||||
price: Mapped[int] = mapped_column(nullable=False, server_default='0', comment='Цена услуги')
|
||||
is_fixed_price: Mapped[bool] = mapped_column(default=False, server_default='0', comment='Фиксированная цена')
|
||||
|
||||
employees = relationship('User', secondary=deal_service_employees)
|
||||
employees: Mapped[list['User']] = relationship('User', secondary=card_service_employees)
|
||||
|
||||
__table_args__ = (
|
||||
UniqueConstraint('deal_id', 'service_id', name='uix_deal_service'),
|
||||
UniqueConstraint('card_id', 'service_id', name='uix_card_service'),
|
||||
)
|
||||
|
||||
|
||||
class DealProductService(BaseModel):
|
||||
__tablename__ = 'deal_product_services'
|
||||
deal_id = Column(Integer, primary_key=True, nullable=False, comment='ID Сделки')
|
||||
class CardProductService(BaseModel):
|
||||
__tablename__ = 'card_product_services'
|
||||
card_id: Mapped[int] = mapped_column(primary_key=True, nullable=False, comment='ID Сделки')
|
||||
|
||||
product_id = Column(Integer, primary_key=True, nullable=False, comment='ID Продукта')
|
||||
product_id: Mapped[int] = mapped_column(primary_key=True, nullable=False, comment='ID Продукта')
|
||||
|
||||
service_id = Column(Integer, ForeignKey('services.id'), primary_key=True, nullable=False, comment='ID Услуги')
|
||||
service_id: Mapped[int] = mapped_column(
|
||||
ForeignKey('services.id'),
|
||||
primary_key=True,
|
||||
nullable=False,
|
||||
comment='ID Услуги',
|
||||
)
|
||||
|
||||
price = Column(Integer, nullable=False, comment='Цена услуги')
|
||||
price: Mapped[int] = mapped_column(nullable=False, comment='Цена услуги')
|
||||
|
||||
deal_product = relationship('DealProduct',
|
||||
back_populates='services',
|
||||
primaryjoin="and_(DealProductService.deal_id == DealProduct.deal_id, "
|
||||
"DealProductService.product_id == DealProduct.product_id)",
|
||||
foreign_keys=[deal_id, product_id])
|
||||
is_fixed_price: Mapped[bool] = mapped_column(default=False, server_default='0', comment='Фиксированная цена')
|
||||
|
||||
service = relationship('Service',
|
||||
foreign_keys=[service_id],
|
||||
lazy='joined'
|
||||
card_product: Mapped['CardProduct'] = relationship(
|
||||
'CardProduct',
|
||||
back_populates='services',
|
||||
primaryjoin="and_(CardProductService.card_id == CardProduct.card_id, "
|
||||
"CardProductService.product_id == CardProduct.product_id)",
|
||||
foreign_keys=[card_id, product_id],
|
||||
)
|
||||
|
||||
)
|
||||
employees = relationship('User',
|
||||
secondary=deal_product_service_employees,
|
||||
)
|
||||
service: Mapped['Service'] = relationship(
|
||||
'Service',
|
||||
foreign_keys=[service_id],
|
||||
lazy='joined',
|
||||
)
|
||||
employees: Mapped[list['User']] = relationship(
|
||||
'User',
|
||||
secondary=card_product_service_employees,
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
ForeignKeyConstraint(
|
||||
['deal_id', 'product_id'],
|
||||
['deal_products.deal_id', 'deal_products.product_id']
|
||||
['card_id', 'product_id'],
|
||||
['card_products.card_id', 'card_products.product_id']
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class DealProduct(BaseModel):
|
||||
__tablename__ = 'deal_products'
|
||||
deal_id = Column(Integer, ForeignKey('deals.id'), primary_key=True, nullable=False, comment='ID Сделки')
|
||||
product_id = Column(Integer, ForeignKey('products.id'), primary_key=True, nullable=False, comment='ID Продукта')
|
||||
quantity = Column(Integer, nullable=False, comment='Кол-во продукта')
|
||||
class CardProduct(BaseModel):
|
||||
__tablename__ = 'card_products'
|
||||
card_id: Mapped[int] = mapped_column(ForeignKey('cards.id'), primary_key=True, nullable=False, comment='ID карточки')
|
||||
product_id: Mapped[int] = mapped_column(
|
||||
ForeignKey('products.id'),
|
||||
primary_key=True,
|
||||
nullable=False,
|
||||
comment='ID Продукта',
|
||||
)
|
||||
quantity: Mapped[int] = mapped_column(nullable=False, comment='Кол-во продукта')
|
||||
comment: Mapped[str] = mapped_column(nullable=False, server_default='', comment='Комментарий к товару')
|
||||
|
||||
deal = relationship('Deal',
|
||||
back_populates='products',
|
||||
foreign_keys=[deal_id])
|
||||
product = relationship(
|
||||
card: Mapped['Card'] = relationship(
|
||||
'Card',
|
||||
back_populates='products',
|
||||
foreign_keys=[card_id],
|
||||
)
|
||||
product: Mapped['Product'] = relationship(
|
||||
'Product',
|
||||
lazy='joined',
|
||||
foreign_keys=[product_id],
|
||||
)
|
||||
|
||||
services = relationship('DealProductService',
|
||||
back_populates='deal_product',
|
||||
cascade="all, delete-orphan",
|
||||
primaryjoin="and_(DealProductService.deal_id == DealProduct.deal_id, "
|
||||
"DealProductService.product_id == DealProduct.product_id)",
|
||||
foreign_keys=[DealProductService.deal_id, DealProductService.product_id],
|
||||
lazy='selectin',
|
||||
order_by="desc(DealProductService.service_id)"
|
||||
)
|
||||
services: Mapped[list['CardProductService']] = relationship(
|
||||
'CardProductService',
|
||||
back_populates='card_product',
|
||||
cascade="all, delete-orphan",
|
||||
primaryjoin="and_(CardProductService.card_id == CardProduct.card_id, "
|
||||
"CardProductService.product_id == CardProduct.product_id)",
|
||||
foreign_keys=[CardProductService.card_id, CardProductService.product_id],
|
||||
lazy='selectin',
|
||||
order_by="desc(CardProductService.service_id)"
|
||||
)
|
||||
|
||||
|
||||
barcode_template_attribute_link = Table(
|
||||
@@ -114,4 +141,3 @@ barcode_template_attribute_link = Table(
|
||||
Column('barcode_template_id', ForeignKey('barcode_templates.id')),
|
||||
Column('attribute_id', ForeignKey('barcode_template_attributes.id'))
|
||||
)
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user