main commit
2025-10-16 16:30:25 +09:00
parent 91c7e04474
commit 537e7b363f
1146 changed files with 45926 additions and 77196 deletions


@@ -0,0 +1,151 @@
"""Create nutrition service tables
Revision ID: 3f4e5a1b8c9d
Revises: 49846a45b6b0
Create Date: 2025-10-16 23:01:02.123456
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '3f4e5a1b8c9d'
down_revision = '49846a45b6b0'
branch_labels = None
depends_on = None
def upgrade():
# Food items table
op.create_table(
'food_items',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('fatsecret_id', sa.String(length=50), nullable=True),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('brand', sa.String(length=255), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('food_type', sa.String(length=50), nullable=True),
sa.Column('serving_size', sa.String(length=100), nullable=True),
sa.Column('serving_weight_grams', sa.Float(), nullable=True),
sa.Column('calories', sa.Float(), nullable=True),
sa.Column('protein_grams', sa.Float(), nullable=True),
sa.Column('fat_grams', sa.Float(), nullable=True),
sa.Column('carbs_grams', sa.Float(), nullable=True),
sa.Column('fiber_grams', sa.Float(), nullable=True),
sa.Column('sugar_grams', sa.Float(), nullable=True),
sa.Column('sodium_mg', sa.Float(), nullable=True),
sa.Column('cholesterol_mg', sa.Float(), nullable=True),
sa.Column('ingredients', sa.Text(), nullable=True),
sa.Column('is_verified', sa.Boolean(), nullable=True),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_food_items_fatsecret_id'), 'food_items', ['fatsecret_id'], unique=True)
op.create_index(op.f('ix_food_items_name'), 'food_items', ['name'], unique=False)
op.create_index(op.f('ix_food_items_uuid'), 'food_items', ['uuid'], unique=True)
# User food-consumption entries table
op.create_table(
'user_nutrition_entries',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('entry_date', sa.Date(), nullable=False),
sa.Column('meal_type', sa.String(length=50), nullable=False),
sa.Column('food_item_id', sa.Integer(), nullable=True),
sa.Column('custom_food_name', sa.String(length=255), nullable=True),
sa.Column('quantity', sa.Float(), nullable=False),
sa.Column('unit', sa.String(length=50), nullable=True),
sa.Column('calories', sa.Float(), nullable=True),
sa.Column('protein_grams', sa.Float(), nullable=True),
sa.Column('fat_grams', sa.Float(), nullable=True),
sa.Column('carbs_grams', sa.Float(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['food_item_id'], ['food_items.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_nutrition_entries_entry_date'), 'user_nutrition_entries', ['entry_date'], unique=False)
op.create_index(op.f('ix_user_nutrition_entries_user_id'), 'user_nutrition_entries', ['user_id'], unique=False)
op.create_index(op.f('ix_user_nutrition_entries_uuid'), 'user_nutrition_entries', ['uuid'], unique=True)
# Water intake tracking table
op.create_table(
'water_intake',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('entry_date', sa.Date(), nullable=False),
sa.Column('amount_ml', sa.Integer(), nullable=False),
sa.Column('entry_time', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('notes', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_water_intake_entry_date'), 'water_intake', ['entry_date'], unique=False)
op.create_index(op.f('ix_water_intake_user_id'), 'water_intake', ['user_id'], unique=False)
op.create_index(op.f('ix_water_intake_uuid'), 'water_intake', ['uuid'], unique=True)
# Physical activity tracking table
op.create_table(
'user_activity_entries',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('entry_date', sa.Date(), nullable=False),
sa.Column('activity_type', sa.String(length=100), nullable=False),
sa.Column('duration_minutes', sa.Integer(), nullable=False),
sa.Column('calories_burned', sa.Float(), nullable=True),
sa.Column('distance_km', sa.Float(), nullable=True),
sa.Column('steps', sa.Integer(), nullable=True),
sa.Column('intensity', sa.String(length=20), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_activity_entries_entry_date'), 'user_activity_entries', ['entry_date'], unique=False)
op.create_index(op.f('ix_user_activity_entries_user_id'), 'user_activity_entries', ['user_id'], unique=False)
op.create_index(op.f('ix_user_activity_entries_uuid'), 'user_activity_entries', ['uuid'], unique=True)
# User nutrition and activity goals table
op.create_table(
'nutrition_goals',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('daily_calorie_goal', sa.Integer(), nullable=True),
sa.Column('protein_goal_grams', sa.Integer(), nullable=True),
sa.Column('fat_goal_grams', sa.Integer(), nullable=True),
sa.Column('carbs_goal_grams', sa.Integer(), nullable=True),
sa.Column('water_goal_ml', sa.Integer(), nullable=True),
sa.Column('activity_goal_minutes', sa.Integer(), nullable=True),
sa.Column('weight_goal_kg', sa.Float(), nullable=True),
sa.Column('goal_type', sa.String(length=50), nullable=True),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_nutrition_goals_user_id'), 'nutrition_goals', ['user_id'], unique=True)
def downgrade():
op.drop_index(op.f('ix_nutrition_goals_user_id'), table_name='nutrition_goals')
op.drop_table('nutrition_goals')
op.drop_index(op.f('ix_user_activity_entries_uuid'), table_name='user_activity_entries')
op.drop_index(op.f('ix_user_activity_entries_user_id'), table_name='user_activity_entries')
op.drop_index(op.f('ix_user_activity_entries_entry_date'), table_name='user_activity_entries')
op.drop_table('user_activity_entries')
op.drop_index(op.f('ix_water_intake_uuid'), table_name='water_intake')
op.drop_index(op.f('ix_water_intake_user_id'), table_name='water_intake')
op.drop_index(op.f('ix_water_intake_entry_date'), table_name='water_intake')
op.drop_table('water_intake')
op.drop_index(op.f('ix_user_nutrition_entries_uuid'), table_name='user_nutrition_entries')
op.drop_index(op.f('ix_user_nutrition_entries_user_id'), table_name='user_nutrition_entries')
op.drop_index(op.f('ix_user_nutrition_entries_entry_date'), table_name='user_nutrition_entries')
op.drop_table('user_nutrition_entries')
op.drop_index(op.f('ix_food_items_uuid'), table_name='food_items')
op.drop_index(op.f('ix_food_items_name'), table_name='food_items')
op.drop_index(op.f('ix_food_items_fatsecret_id'), table_name='food_items')
op.drop_table('food_items')
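Applying and rolling back a migration like this is normally done through Alembic's CLI; for reference, a minimal programmatic sketch (the `alembic.ini` path is an assumption about the project layout):

```python
# Drive Alembic from Python instead of the CLI; equivalent to
# `alembic upgrade 3f4e5a1b8c9d` / `alembic downgrade 49846a45b6b0`.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location of the Alembic config
command.upgrade(cfg, "3f4e5a1b8c9d")      # runs upgrade() above
# command.downgrade(cfg, "49846a45b6b0")  # exercises downgrade()
```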


@@ -0,0 +1,217 @@
"""Add nutrition service tables
Revision ID: a2e71842cf5a
Revises: c78a12db4567
Create Date: 2025-10-16 10:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "a2e71842cf5a"
down_revision = "c78a12db4567"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# Create the food_items table
op.create_table(
"food_items",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("uuid", postgresql.UUID(as_uuid=True), nullable=True),
sa.Column("fatsecret_id", sa.String(length=50), nullable=True),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("brand", sa.String(length=255), nullable=True),
sa.Column("description", sa.Text(), nullable=True),
sa.Column("food_type", sa.String(length=50), nullable=True),
sa.Column("serving_size", sa.String(length=100), nullable=True),
sa.Column("serving_weight_grams", sa.Float(), nullable=True),
sa.Column("calories", sa.Float(), nullable=True),
sa.Column("protein_grams", sa.Float(), nullable=True),
sa.Column("fat_grams", sa.Float(), nullable=True),
sa.Column("carbs_grams", sa.Float(), nullable=True),
sa.Column("fiber_grams", sa.Float(), nullable=True),
sa.Column("sugar_grams", sa.Float(), nullable=True),
sa.Column("sodium_mg", sa.Float(), nullable=True),
sa.Column("cholesterol_mg", sa.Float(), nullable=True),
sa.Column("ingredients", sa.Text(), nullable=True),
sa.Column("is_verified", sa.Boolean(), nullable=True),
sa.Column(
"created_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("updated_at", sa.TIMESTAMP(timezone=True), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_food_items_fatsecret_id"), "food_items", ["fatsecret_id"], unique=True
)
op.create_index(op.f("ix_food_items_uuid"), "food_items", ["uuid"], unique=True)
# Create the user_nutrition_entries table
op.create_table(
"user_nutrition_entries",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("uuid", postgresql.UUID(as_uuid=True), nullable=True),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("entry_date", sa.Date(), nullable=False),
sa.Column("meal_type", sa.String(length=50), nullable=False),
sa.Column("food_item_id", sa.Integer(), nullable=True),
sa.Column("custom_food_name", sa.String(length=255), nullable=True),
sa.Column("quantity", sa.Float(), nullable=False),
sa.Column("unit", sa.String(length=50), nullable=True),
sa.Column("calories", sa.Float(), nullable=True),
sa.Column("protein_grams", sa.Float(), nullable=True),
sa.Column("fat_grams", sa.Float(), nullable=True),
sa.Column("carbs_grams", sa.Float(), nullable=True),
sa.Column("notes", sa.Text(), nullable=True),
sa.Column(
"created_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("updated_at", sa.TIMESTAMP(timezone=True), nullable=True),
sa.ForeignKeyConstraint(["food_item_id"], ["food_items.id"]),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_user_nutrition_entries_entry_date"),
"user_nutrition_entries",
["entry_date"],
unique=False,
)
op.create_index(
op.f("ix_user_nutrition_entries_user_id"),
"user_nutrition_entries",
["user_id"],
unique=False,
)
op.create_index(
op.f("ix_user_nutrition_entries_uuid"),
"user_nutrition_entries",
["uuid"],
unique=True
)
# Create the water_intake table
op.create_table(
"water_intake",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("uuid", postgresql.UUID(as_uuid=True), nullable=True),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("entry_date", sa.Date(), nullable=False),
sa.Column("amount_ml", sa.Integer(), nullable=False),
sa.Column(
"entry_time",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("notes", sa.Text(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_water_intake_entry_date"), "water_intake", ["entry_date"], unique=False
)
op.create_index(
op.f("ix_water_intake_user_id"), "water_intake", ["user_id"], unique=False
)
op.create_index(op.f("ix_water_intake_uuid"), "water_intake", ["uuid"], unique=True)
# Create the user_activity_entries table
op.create_table(
"user_activity_entries",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("uuid", postgresql.UUID(as_uuid=True), nullable=True),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("entry_date", sa.Date(), nullable=False),
sa.Column("activity_type", sa.String(length=100), nullable=False),
sa.Column("duration_minutes", sa.Integer(), nullable=False),
sa.Column("calories_burned", sa.Float(), nullable=True),
sa.Column("distance_km", sa.Float(), nullable=True),
sa.Column("steps", sa.Integer(), nullable=True),
sa.Column("intensity", sa.String(length=20), nullable=True),
sa.Column("notes", sa.Text(), nullable=True),
sa.Column(
"created_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_user_activity_entries_entry_date"),
"user_activity_entries",
["entry_date"],
unique=False,
)
op.create_index(
op.f("ix_user_activity_entries_user_id"),
"user_activity_entries",
["user_id"],
unique=False,
)
op.create_index(
op.f("ix_user_activity_entries_uuid"),
"user_activity_entries",
["uuid"],
unique=True
)
# Create the nutrition_goals table
op.create_table(
"nutrition_goals",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.Integer(), nullable=False),
sa.Column("daily_calorie_goal", sa.Integer(), nullable=True),
sa.Column("protein_goal_grams", sa.Integer(), nullable=True),
sa.Column("fat_goal_grams", sa.Integer(), nullable=True),
sa.Column("carbs_goal_grams", sa.Integer(), nullable=True),
sa.Column("water_goal_ml", sa.Integer(), nullable=True),
sa.Column("activity_goal_minutes", sa.Integer(), nullable=True),
sa.Column("weight_goal_kg", sa.Float(), nullable=True),
sa.Column("goal_type", sa.String(length=50), nullable=True),
sa.Column(
"created_at",
sa.TIMESTAMP(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("updated_at", sa.TIMESTAMP(timezone=True), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_nutrition_goals_user_id"), "nutrition_goals", ["user_id"], unique=True
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f("ix_nutrition_goals_user_id"), table_name="nutrition_goals")
op.drop_table("nutrition_goals")
op.drop_index(op.f("ix_user_activity_entries_uuid"), table_name="user_activity_entries")
op.drop_index(op.f("ix_user_activity_entries_user_id"), table_name="user_activity_entries")
op.drop_index(op.f("ix_user_activity_entries_entry_date"), table_name="user_activity_entries")
op.drop_table("user_activity_entries")
op.drop_index(op.f("ix_water_intake_uuid"), table_name="water_intake")
op.drop_index(op.f("ix_water_intake_user_id"), table_name="water_intake")
op.drop_index(op.f("ix_water_intake_entry_date"), table_name="water_intake")
op.drop_table("water_intake")
op.drop_index(op.f("ix_user_nutrition_entries_uuid"), table_name="user_nutrition_entries")
op.drop_index(op.f("ix_user_nutrition_entries_user_id"), table_name="user_nutrition_entries")
op.drop_index(op.f("ix_user_nutrition_entries_entry_date"), table_name="user_nutrition_entries")
op.drop_table("user_nutrition_entries")
op.drop_index(op.f("ix_food_items_uuid"), table_name="food_items")
op.drop_index(op.f("ix_food_items_fatsecret_id"), table_name="food_items")
op.drop_table("food_items")
# ### end Alembic commands ###


@@ -25,6 +25,7 @@ services:
- LOCATION_SERVICE_URL=http://location-service-1:8003,http://location-service-2:8003
- CALENDAR_SERVICE_URL=http://calendar-service-1:8004,http://calendar-service-2:8004
- NOTIFICATION_SERVICE_URL=http://notification-service-1:8005,http://notification-service-2:8005
- NUTRITION_SERVICE_URL=http://nutrition-service-1:8006,http://nutrition-service-2:8006
- REDIS_URL=redis://redis-cluster:6379/0
depends_on:
- redis-cluster
@@ -47,6 +48,7 @@ services:
- LOCATION_SERVICE_URL=http://location-service-1:8003,http://location-service-2:8003
- CALENDAR_SERVICE_URL=http://calendar-service-1:8004,http://calendar-service-2:8004
- NOTIFICATION_SERVICE_URL=http://notification-service-1:8005,http://notification-service-2:8005
- NUTRITION_SERVICE_URL=http://nutrition-service-1:8006,http://nutrition-service-2:8006
- REDIS_URL=redis://redis-cluster:6379/0
depends_on:
- redis-cluster
@@ -286,4 +288,48 @@ volumes:
kafka_3_data:
zookeeper_data:
prometheus_data:
grafana_data:
# Nutrition Service Cluster
nutrition-service-1:
image: women-safety/nutrition-service:${TAG:-latest}
environment:
- NODE_ID=1
- DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres-primary:5432/women_safety_prod
- DATABASE_REPLICA_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres-replica:5432/women_safety_prod
- REDIS_URL=redis://redis-cluster:6379/5
- FATSECRET_CLIENT_ID=${FATSECRET_CLIENT_ID}
- FATSECRET_CLIENT_SECRET=${FATSECRET_CLIENT_SECRET}
depends_on:
- postgres-primary
- redis-cluster
restart: always
deploy:
resources:
limits:
cpus: '1.0'
memory: 2G
reservations:
cpus: '0.5'
memory: 512M
nutrition-service-2:
image: women-safety/nutrition-service:${TAG:-latest}
environment:
- NODE_ID=2
- DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres-primary:5432/women_safety_prod
- DATABASE_REPLICA_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres-replica:5432/women_safety_prod
- REDIS_URL=redis://redis-cluster:6379/5
- FATSECRET_CLIENT_ID=${FATSECRET_CLIENT_ID}
- FATSECRET_CLIENT_SECRET=${FATSECRET_CLIENT_SECRET}
depends_on:
- postgres-primary
- redis-cluster
restart: always
deploy:
resources:
limits:
cpus: '1.0'
memory: 2G
reservations:
cpus: '0.5'
memory: 512M


@@ -29,12 +29,14 @@ services:
- LOCATION_SERVICE_URL=http://location-service:8003
- CALENDAR_SERVICE_URL=http://calendar-service:8004
- NOTIFICATION_SERVICE_URL=http://notification-service:8005
- NUTRITION_SERVICE_URL=http://nutrition-service:8006
depends_on:
- user-service
- emergency-service
- location-service
- calendar-service
- notification-service
- nutrition-service
user-service:
image: women-safety/user-service:latest
@@ -96,5 +98,18 @@ services:
- postgres
- redis
nutrition-service:
image: women-safety/nutrition-service:latest
ports:
- "8006:8006"
environment:
- DATABASE_URL=postgresql://postgres:postgres@postgres:5432/women_safety_test
- REDIS_URL=redis://redis:6379/5
- FATSECRET_CLIENT_ID=test-fatsecret-client-id
- FATSECRET_CLIENT_SECRET=test-fatsecret-client-secret
depends_on:
- postgres
- redis
volumes:
postgres_test_data:


@@ -6,6 +6,26 @@ The Women's Safety App provides a comprehensive API for managing user profiles,
**Base URL:** `http://localhost:8000` (API Gateway)
## Swagger Documentation
Interactive API documentation is available via Swagger UI at the following URLs:
- API Gateway: `http://localhost:8000/docs`
- User Service: `http://localhost:8001/docs`
- Emergency Service: `http://localhost:8002/docs`
- Location Service: `http://localhost:8003/docs`
- Calendar Service: `http://localhost:8004/docs`
- Notification Service: `http://localhost:8005/docs`
- Nutrition Service: `http://localhost:8006/docs`
ReDoc-formatted documentation is available at:
- API Gateway: `http://localhost:8000/redoc`
- User Service: `http://localhost:8001/redoc`
- (and so on for the remaining services)
> **Note**: The Swagger documentation for each service is only available while that service is running. If a service is not running, its documentation page will be unreachable.
## Authentication
All endpoints except registration and login require JWT authentication.
@@ -15,6 +35,29 @@ All endpoints except registration and login require JWT authentication.
Authorization: Bearer <jwt_token>
```
### Testing with Swagger UI
To test the API through Swagger UI:
1. Start the required services:
```bash
./start_services.sh
```
2. Open Swagger UI in your browser:
```
http://localhost:8000/docs
```
3. Obtain a JWT token via the `/api/v1/auth/login` or `/api/v1/auth/register` endpoints (a scripted variant is sketched after this list)
4. Authorize in Swagger UI:
- Click the "Authorize" button in the top right corner
- Enter the received JWT token in the format: `Bearer <token>`
- Click "Authorize"
5. You can now test all protected endpoints
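For scripted use, the same token can be obtained outside the browser; a minimal sketch using `httpx` (the credential fields follow the register/login examples elsewhere in these docs and may need adjusting):

```python
import httpx

# Log in against the User Service and keep the JWT for subsequent requests.
resp = httpx.post(
    "http://localhost:8001/api/v1/auth/login",
    json={"username": "test_user", "password": "Test123!"},  # demo credentials
)
resp.raise_for_status()
token = resp.json()["access_token"]  # response field name is an assumption
headers = {"Authorization": f"Bearer {token}"}
```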
## API Endpoints
### 🔐 Authentication
@@ -247,6 +290,109 @@ Authorization: Bearer <token>
}
```
### 🍎 Nutrition Services
#### Search Food Items
```http
GET /api/v1/nutrition/foods?query=apple
Authorization: Bearer <token>
```
**Response:**
```json
{
"results": [
{
"food_id": "123456",
"name": "Apple, raw, with skin",
"brand": "",
"calories": 52,
"serving_size": "100g",
"nutrients": {
"carbohydrates": 13.8,
"protein": 0.3,
"fat": 0.2,
"fiber": 2.4
}
},
{
"food_id": "789012",
"name": "Apple juice, unsweetened",
"brand": "Example Brand",
"calories": 46,
"serving_size": "100ml",
"nutrients": {
"carbohydrates": 11.2,
"protein": 0.1,
"fat": 0.1,
"fiber": 0.2
}
}
]
}
```
#### Add Nutrition Entry
```http
POST /api/v1/nutrition/entries
Authorization: Bearer <token>
```
**Body:**
```json
{
"food_id": "123456",
"date": "2025-10-16",
"meal_type": "lunch",
"quantity": 1.0,
"serving_size": "100g",
"notes": "Red apple"
}
```
#### Get Daily Nutrition Summary
```http
GET /api/v1/nutrition/daily-summary?date=2025-10-16
Authorization: Bearer <token>
```
**Response:**
```json
{
"date": "2025-10-16",
"total_calories": 1578,
"total_carbohydrates": 175.3,
"total_proteins": 78.2,
"total_fats": 52.8,
"total_water": 1200,
"entries": [
{
"id": 123,
"food_name": "Apple, raw, with skin",
"meal_type": "lunch",
"calories": 52,
"quantity": 1.0,
"serving_size": "100g"
}
]
}
```
#### Track Water Intake
```http
POST /api/v1/nutrition/water
Authorization: Bearer <token>
```
**Body:**
```json
{
"date": "2025-10-16",
"amount_ml": 250,
"time": "12:30:00"
}
```
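Tying the nutrition endpoints together, a short client-side sketch (reuses the `headers` from the login sketch above; URLs and payload shapes follow the examples in this section):

```python
import httpx

BASE = "http://localhost:8000/api/v1/nutrition"

# Search for a food, log the first hit as lunch, then pull the daily summary.
foods = httpx.get(f"{BASE}/foods", params={"query": "apple"}, headers=headers).json()
first = foods["results"][0]
httpx.post(f"{BASE}/entries", headers=headers, json={
    "food_id": first["food_id"],
    "date": "2025-10-16",
    "meal_type": "lunch",
    "quantity": 1.0,
    "serving_size": first["serving_size"],
})
summary = httpx.get(f"{BASE}/daily-summary", params={"date": "2025-10-16"},
                    headers=headers).json()
print(summary["total_calories"])
```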
### 📊 System Status
#### Check Service Health


@@ -25,16 +25,16 @@ This document describes the microservices architecture of the Women's Safety App
│ Request Routing) │
└───────────────────────────┘
┌─────────────┬──────────────┼──────────────┬─────────────┐
│ │ │ │ │
┌─────────┐ ┌─────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐
│ User │ │Emergency│ │ Location │ │ Calendar │ │Notification │
│Service │ │Service │ │ Service │ │ Service │ │ Service │
│:8001 │ │:8002 │ │ :8003 │ │ :8004 │ │ :8005 │
└─────────┘ └─────────┘ └─────────────┘ └─────────────┘ └─────────────┘
│ │ │ │ │
└─────────────┼──────────────┼──────────────┼─────────────┘
│ │ │
┌─────────────┬──────────────┼──────────────┬─────────────┬─────────────┐
│ │ │ │ │
┌─────────┐ ┌─────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐
│ User │ │Emergency│ │ Location │ │ Calendar │ │Notification │ │ Nutrition │
│Service │ │Service │ │ Service │ │ Service │ │ Service │ │ Service │
│:8001 │ │:8002 │ │ :8003 │ │ :8004 │ │ :8005 │ │ :8006 │
└─────────┘ └─────────┘ └─────────────┘ └─────────────┘ └─────────────┘ └─────────────┘
│ │ │ │ │
└─────────────┼──────────────┼──────────────┼─────────────┼─────────────┘
│ │ │
┌────────────────────────────────────────────────┐
│ Message Bus │
│ (Kafka/RabbitMQ) │

docs/FATSECRET_API.md Normal file

@@ -0,0 +1,228 @@
# Working with the FatSecret API in This Project
This document describes how the FatSecret API is used in our project to retrieve foods and their nutritional data.
## API Setup
### API Keys
The following keys are required to work with the FatSecret API:
- `FATSECRET_CLIENT_ID` - the client ID
- `FATSECRET_CLIENT_SECRET` - the client secret
- `FATSECRET_CUSTOMER_KEY` - the customer key (used as an alternative to CLIENT_ID)
These keys are stored in the project's `.env` file and loaded into the configuration through the `shared/config.py` module.
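For illustration, a minimal sketch of the corresponding settings fields, assuming a pydantic-style settings class as is common in FastAPI projects (the actual `shared/config.py` may differ):

```python
from pydantic_settings import BaseSettings  # pydantic v2; in v1 import from pydantic


class Settings(BaseSettings):
    # Names mirror the variables above; values are read from the .env file.
    FATSECRET_CLIENT_ID: str = ""
    FATSECRET_CLIENT_SECRET: str = ""
    FATSECRET_CUSTOMER_KEY: str = ""

    class Config:
        env_file = ".env"


settings = Settings()
```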
### Authentication Methods
The FatSecret API supports two authentication methods:
1. **OAuth 2.0** - requires a proxy server with an IP allowlist for token requests (does not work in our test environment)
2. **OAuth 1.0** - works directly and signs every request (recommended)
## API Usage Examples
### Searching for Foods
```python
import requests  # generate_oauth_params is defined in the section below

def search_food(query, max_results=5):
    """Search for foods by name"""
    # API endpoint
    url = "https://platform.fatsecret.com/rest/server.api"
    # Request parameters
    params = {
        'method': 'foods.search',
        'search_expression': query,
        'max_results': max_results,
        'format': 'json'
    }
    # Sign the request with OAuth 1.0
    oauth_params = generate_oauth_params("GET", url, params)
    # Send the request
    response = requests.get(url, params=oauth_params)
    if response.status_code == 200:
        return response.json()
    return None
```
### Getting Food Details
```python
def get_food_details(food_id):
    """Get detailed information about a food by ID"""
    # API endpoint
    url = "https://platform.fatsecret.com/rest/server.api"
    # Request parameters
    params = {
        'method': 'food.get',
        'food_id': food_id,
        'format': 'json'
    }
    # Sign the request with OAuth 1.0
    oauth_params = generate_oauth_params("GET", url, params)
    # Send the request
    response = requests.get(url, params=oauth_params)
    if response.status_code == 200:
        return response.json()
    return None
```
## Generating the OAuth 1.0 Signature
```python
import base64
import hashlib
import hmac
import random
import time
import urllib.parse

# FATSECRET_KEY and FATSECRET_SECRET come from the project configuration (.env)

def generate_oauth_params(http_method, url, params):
    """Create and sign OAuth 1.0 parameters"""
    # Current time in seconds
    timestamp = str(int(time.time()))
    # Random string for the nonce
    nonce = ''.join([str(random.randint(0, 9)) for _ in range(8)])
    # Base set of OAuth parameters
    oauth_params = {
        'oauth_consumer_key': FATSECRET_KEY,
        'oauth_nonce': nonce,
        'oauth_signature_method': 'HMAC-SHA1',
        'oauth_timestamp': timestamp,
        'oauth_version': '1.0'
    }
    # Merge with the request parameters
    all_params = {**params, **oauth_params}
    # Sort parameters by key
    sorted_params = sorted(all_params.items())
    # Build the parameter string for the signature
    param_string = "&".join([f"{urllib.parse.quote(str(k), safe='')}={urllib.parse.quote(str(v), safe='')}"
                             for k, v in sorted_params])
    # Build the signature base string
    signature_base = f"{http_method}&{urllib.parse.quote(url, safe='')}&{urllib.parse.quote(param_string, safe='')}"
    # Build the signing key
    signing_key = f"{urllib.parse.quote(str(FATSECRET_SECRET), safe='')}&"
    # Create the HMAC-SHA1 signature
    signature = base64.b64encode(
        hmac.new(
            signing_key.encode(),
            signature_base.encode(),
            hashlib.sha1
        ).digest()
    ).decode()
    # Append the signature to the OAuth parameters
    all_params['oauth_signature'] = signature
    return all_params
```
## API Response Format
### Food Search
Response structure of the `foods.search` method:
```json
{
"foods": {
"max_results": "5",
"total_results": "1000",
"page_number": "0",
"food": [
{
"food_id": "35718",
"food_name": "Apples",
"food_description": "Per 100g - Calories: 52kcal | Fat: 0.17g | Carbs: 13.81g | Protein: 0.26g",
"food_url": "https://www.fatsecret.com/calories-nutrition/usda/apples?portionid=34128"
},
// ...more foods
]
}
}
```
### Food Details
Response structure of the `food.get` method:
{
"food": {
"food_id": "35718",
"food_name": "Apples",
"food_type": "Generic",
"servings": {
"serving": [
{
"serving_id": "34128",
"serving_description": "100g",
"calories": "52",
"carbohydrate": "13.81",
"protein": "0.26",
"fat": "0.17",
// other nutrients
},
// other serving options
]
}
}
}
```
## API Limitations
1. Russian-language search may be unavailable in the basic tier of the API
2. There is a monthly request quota (depends on the access level)
3. OAuth 2.0 requires a proxy server configured for specific IP addresses
## Testing the API
A ready-made test script in the project root can be used to test the API:
```bash
# Activate the virtual environment
source venv/bin/activate
# Run the test script
python test_fatsecret_api_oauth1.py
```
You can also use this script as a template for writing your own tests. Usage examples:
```python
# Import functions from the test script
from test_fatsecret_api_oauth1 import search_food, process_search_results
# Search for foods in English
result = search_food("chicken breast")
process_search_results(result)
# Search for foods in Russian
result = search_food("яблоко", locale="ru_RU")
process_search_results(result)
```
### Example cURL Test Commands
The API can also be exercised with cURL:
```bash
# Search foods via the nutrition service (authorization required)
curl -X POST http://localhost:8006/api/v1/nutrition/search \
-H "Content-Type: application/json" \
-H "Authorization: Bearer YOUR_JWT_TOKEN" \
-d '{"query": "apple", "max_results": 5}'
# Direct FatSecret API test (OAuth 1.0)
curl -X GET "https://platform.fatsecret.com/rest/server.api?method=foods.search&search_expression=apple&max_results=5&format=json&oauth_consumer_key=YOUR_CONSUMER_KEY&oauth_signature_method=HMAC-SHA1&oauth_timestamp=TIMESTAMP&oauth_nonce=NONCE&oauth_version=1.0&oauth_signature=YOUR_SIGNATURE"
```
> **Note:** Making a direct FatSecret API request via cURL requires generating a valid OAuth 1.0 signature. Using the `test_fatsecret_api_oauth1.py` script instead is recommended.
## Usage Recommendations
1. Use OAuth 1.0 for authentication, since it works without extra infrastructure
2. Cache request results to reduce the load on the API (see the sketch below)
3. Handle possible API errors and show users clear messages
4. Use English search queries, since the database is mostly in English
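To illustrate point 2, a minimal caching sketch around `search_food`, assuming a Redis instance at the database index the compose files use for the nutrition service (key scheme and TTL are illustrative):

```python
import json

import redis  # pip install redis

r = redis.Redis.from_url("redis://localhost:6379/5")


def search_food_cached(query, max_results=5, ttl_seconds=3600):
    """Serve repeated searches from Redis instead of re-hitting FatSecret."""
    key = f"fatsecret:search:{query}:{max_results}"
    cached = r.get(key)
    if cached is not None:
        return json.loads(cached)
    result = search_food(query, max_results)  # defined above
    if result is not None:
        r.setex(key, ttl_seconds, json.dumps(result))
    return result
```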

docs/NUTRITION_API.md Normal file

@@ -0,0 +1,593 @@
# Nutrition Service API
The nutrition service provides an API for working with nutrition data: searching for foods, adding them to a food diary, and tracking water intake and physical activity.
## Core Features
- Food search via the FatSecret API
- Tracking food and nutrient intake
- Logging water consumption
- Tracking physical activity
- Setting and tracking nutrition and activity goals
## Base URL
```
http://localhost:8006/api/v1/nutrition/
```
## Swagger Documentation
Interactive API documentation is available via Swagger UI at:
```
http://localhost:8006/docs
```
or via ReDoc:
```
http://localhost:8006/redoc
```
> **Note**: The Swagger documentation is only available while the nutrition service is running; otherwise the documentation page will be unreachable.
### Using Swagger UI
1. Open `http://localhost:8006/docs` in your browser
2. Authorize using the "Authorize" button at the top of the page:
- Enter your JWT token in the format: `Bearer <token>`
- Click "Authorize"
3. You can now test all API endpoints directly from Swagger UI:
- Select the desired endpoint
- Fill in the request parameters
- Click "Execute" to send the request
![Swagger UI Example](https://swagger.io/swagger/media/images/swagger-ui-example.png)
## Endpoints
### Food Search
#### Search by Name
```http
POST /api/v1/nutrition/search
```
Request parameters:
```json
{
"query": "яблоко",
"page_number": 0,
"max_results": 10
}
```
Response:
```json
[
{
"id": 1,
"uuid": "123e4567-e89b-12d3-a456-426614174000",
"fatsecret_id": "35718",
"name": "Apple",
"brand": null,
"description": "A common fruit",
"food_type": "Generic",
"serving_size": "100g",
"serving_weight_grams": 100.0,
"calories": 52.0,
"protein_grams": 0.26,
"fat_grams": 0.17,
"carbs_grams": 13.81,
"fiber_grams": 2.4,
"sugar_grams": 10.39,
"sodium_mg": 1.0,
"cholesterol_mg": 0.0,
"ingredients": null,
"is_verified": true,
"created_at": "2025-10-16T23:10:00"
}
]
```
#### Get Food Details
```http
GET /api/v1/nutrition/food/{food_id}
```
Response:
```json
{
"id": 1,
"uuid": "123e4567-e89b-12d3-a456-426614174000",
"fatsecret_id": "35718",
"name": "Apple",
"brand": null,
"description": "A common fruit",
"food_type": "Generic",
"serving_size": "100g",
"serving_weight_grams": 100.0,
"calories": 52.0,
"protein_grams": 0.26,
"fat_grams": 0.17,
"carbs_grams": 13.81,
"fiber_grams": 2.4,
"sugar_grams": 10.39,
"sodium_mg": 1.0,
"cholesterol_mg": 0.0,
"ingredients": null,
"is_verified": true,
"created_at": "2025-10-16T23:10:00"
}
```
### Food Diary
#### Add a Diary Entry
```http
POST /api/v1/nutrition/diary
```
Request parameters:
```json
{
"food_item_id": 1,
"entry_date": "2025-10-16",
"meal_type": "breakfast",
"quantity": 1.5,
"unit": "piece",
"notes": "Morning apple"
}
```
Response:
```json
{
"id": 1,
"uuid": "123e4567-e89b-12d3-a456-426614174000",
"user_id": 42,
"entry_date": "2025-10-16",
"meal_type": "breakfast",
"food_item_id": 1,
"custom_food_name": null,
"quantity": 1.5,
"unit": "piece",
"calories": 78.0,
"protein_grams": 0.39,
"fat_grams": 0.255,
"carbs_grams": 20.715,
"notes": "Morning apple",
"created_at": "2025-10-16T23:15:00"
}
```
#### Get Diary Entries for a Day
```http
GET /api/v1/nutrition/diary?date=2025-10-16
```
Response:
```json
[
{
"id": 1,
"uuid": "123e4567-e89b-12d3-a456-426614174000",
"user_id": 42,
"entry_date": "2025-10-16",
"meal_type": "breakfast",
"food_item_id": 1,
"custom_food_name": null,
"quantity": 1.5,
"unit": "piece",
"calories": 78.0,
"protein_grams": 0.39,
"fat_grams": 0.255,
"carbs_grams": 20.715,
"notes": "Morning apple",
"created_at": "2025-10-16T23:15:00"
}
]
```
#### Get a Daily Summary
```http
GET /api/v1/nutrition/summary?date=2025-10-16
```
Response:
```json
{
"date": "2025-10-16",
"total_calories": 2150.5,
"total_protein": 85.2,
"total_fat": 65.4,
"total_carbs": 275.3,
"water_consumed_ml": 1500,
"activity_minutes": 45,
"calories_burned": 350,
"entries_by_meal": {
"breakfast": [
{
"id": 1,
"food_name": "Apple",
"quantity": 1.5,
"unit": "piece",
"calories": 78.0
}
],
"lunch": [...],
"dinner": [...],
"snack": [...]
}
}
```
### Water Intake
#### Add a Water Intake Entry
```http
POST /api/v1/nutrition/water
```
Request parameters:
```json
{
"amount_ml": 250,
"entry_date": "2025-10-16",
"notes": "Morning glass"
}
```
Response:
```json
{
"id": 1,
"uuid": "123e4567-e89b-12d3-a456-426614174000",
"user_id": 42,
"entry_date": "2025-10-16",
"amount_ml": 250,
"entry_time": "2025-10-16T08:30:00",
"notes": "Morning glass"
}
```
#### Get Water Intake Entries for a Day
```http
GET /api/v1/nutrition/water?date=2025-10-16
```
Response:
```json
[
{
"id": 1,
"uuid": "123e4567-e89b-12d3-a456-426614174000",
"user_id": 42,
"entry_date": "2025-10-16",
"amount_ml": 250,
"entry_time": "2025-10-16T08:30:00",
"notes": "Morning glass"
},
{
"id": 2,
"uuid": "223e4567-e89b-12d3-a456-426614174001",
"user_id": 42,
"entry_date": "2025-10-16",
"amount_ml": 500,
"entry_time": "2025-10-16T12:15:00",
"notes": "Lunch"
}
]
```
### Physical Activity
#### Add a Physical Activity Entry
```http
POST /api/v1/nutrition/activity
```
Request parameters:
```json
{
"entry_date": "2025-10-16",
"activity_type": "running",
"duration_minutes": 30,
"distance_km": 5.2,
"intensity": "medium",
"notes": "Morning run"
}
```
Response:
```json
{
"id": 1,
"uuid": "123e4567-e89b-12d3-a456-426614174000",
"user_id": 42,
"entry_date": "2025-10-16",
"activity_type": "running",
"duration_minutes": 30,
"calories_burned": 300.5,
"distance_km": 5.2,
"steps": null,
"intensity": "medium",
"notes": "Morning run",
"created_at": "2025-10-16T09:00:00"
}
```
#### Get Activity Entries for a Day
```http
GET /api/v1/nutrition/activity?date=2025-10-16
```
Response:
```json
[
{
"id": 1,
"uuid": "123e4567-e89b-12d3-a456-426614174000",
"user_id": 42,
"entry_date": "2025-10-16",
"activity_type": "running",
"duration_minutes": 30,
"calories_burned": 300.5,
"distance_km": 5.2,
"steps": null,
"intensity": "medium",
"notes": "Morning run",
"created_at": "2025-10-16T09:00:00"
}
]
```
### Nutrition and Activity Goals
#### Set Goals
```http
POST /api/v1/nutrition/goals
```
Request parameters:
```json
{
"daily_calorie_goal": 2000,
"protein_goal_grams": 100,
"fat_goal_grams": 65,
"carbs_goal_grams": 250,
"water_goal_ml": 2500,
"activity_goal_minutes": 45,
"weight_goal_kg": 75.5,
"goal_type": "lose_weight"
}
```
Response:
```json
{
"id": 1,
"user_id": 42,
"daily_calorie_goal": 2000,
"protein_goal_grams": 100,
"fat_goal_grams": 65,
"carbs_goal_grams": 250,
"water_goal_ml": 2500,
"activity_goal_minutes": 45,
"weight_goal_kg": 75.5,
"goal_type": "lose_weight",
"created_at": "2025-10-16T10:00:00",
"updated_at": "2025-10-16T10:00:00"
}
```
#### Get Current Goals
```http
GET /api/v1/nutrition/goals
```
Response:
```json
{
"id": 1,
"user_id": 42,
"daily_calorie_goal": 2000,
"protein_goal_grams": 100,
"fat_goal_grams": 65,
"carbs_goal_grams": 250,
"water_goal_ml": 2500,
"activity_goal_minutes": 45,
"weight_goal_kg": 75.5,
"goal_type": "lose_weight",
"created_at": "2025-10-16T10:00:00",
"updated_at": "2025-10-16T10:00:00"
}
```
## Error Codes
| Code | Description |
|-----|----------|
| 400 | Bad request |
| 401 | Unauthorized |
| 403 | Forbidden |
| 404 | Not found |
| 500 | Internal server error |
## Authentication
All API requests require a JWT token in the Authorization header:
```
Authorization: Bearer <token>
```
The token can be obtained from the authorization service (User Service) via the `/api/v1/auth/login` endpoint.
## Integrations
The nutrition service is integrated with the FatSecret API to retrieve foods and their nutritional data. Communication with the FatSecret API uses OAuth 1.0 authentication with the keys specified in the application configuration.
## Testing the API
### Testing via Swagger UI
The simplest way to test the API is the built-in Swagger UI:
1. Make sure the nutrition service is running:
```bash
# Start all services
./start_services.sh
```
2. Open `http://localhost:8006/docs` in your browser
3. Authorize:
- Click the "Authorize" button in the top right corner
- Enter your JWT token in the format `Bearer <token>`
- Click "Authorize"
4. You can now test all endpoints interactively:
- Select the desired endpoint
- Fill in the request parameters
- Click "Execute"
- Inspect the response body and status code
### Setup and Usage via the CLI
1. Make sure all the required services are running:
```bash
# Start all services
./start_services.sh
```
2. Obtain an authentication token:
```bash
# Register a new user
curl -X POST http://localhost:8001/api/v1/auth/register -H "Content-Type: application/json" -d '{
"email": "test_user@example.com",
"username": "test_user",
"password": "Test123!",
"first_name": "Test",
"last_name": "User",
"phone": "+79991234567"
}' | jq
# Log in and obtain a token
curl -X POST http://localhost:8001/api/v1/auth/login -H "Content-Type: application/json" -d '{
"username": "test_user",
"password": "Test123!"
}' | jq
```
3. Save the received token in a variable for later use:
```bash
export TOKEN="your_jwt_token_here"
```
### Example Requests
#### Food Search
```bash
# Search foods by name
curl -X POST http://localhost:8006/api/v1/nutrition/search \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $TOKEN" \
-d '{
"query": "apple",
"max_results": 5
}' | jq
```
#### Working with the Food Diary
```bash
# Add an entry to the food diary
curl -X POST http://localhost:8006/api/v1/nutrition/diary \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $TOKEN" \
-d '{
"food_item_id": 1,
"entry_date": "2025-10-16",
"meal_type": "breakfast",
"quantity": 1.5,
"unit": "piece",
"notes": "Morning apple"
}' | jq
# Get the diary for a day
curl -X GET http://localhost:8006/api/v1/nutrition/diary?date=2025-10-16 \
-H "Authorization: Bearer $TOKEN" | jq
```
#### Working with Water Intake
```bash
# Add a water intake entry
curl -X POST http://localhost:8006/api/v1/nutrition/water \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $TOKEN" \
-d '{
"amount_ml": 250,
"entry_date": "2025-10-16",
"notes": "Morning glass"
}' | jq
# Get water intake entries for a day
curl -X GET http://localhost:8006/api/v1/nutrition/water?date=2025-10-16 \
-H "Authorization: Bearer $TOKEN" | jq
```
#### Working with Activity
```bash
# Add a physical activity entry
curl -X POST http://localhost:8006/api/v1/nutrition/activity \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $TOKEN" \
-d '{
"entry_date": "2025-10-16",
"activity_type": "running",
"duration_minutes": 30,
"distance_km": 5.2,
"intensity": "medium",
"notes": "Morning run"
}' | jq
# Get activity entries for a day
curl -X GET http://localhost:8006/api/v1/nutrition/activity?date=2025-10-16 \
-H "Authorization: Bearer $TOKEN" | jq
```
### Automated Testing
The `tests` folder contains scripts for automated API testing:
```bash
# Run all nutrition service tests
cd tests
./test_nutrition_service.sh
# Run the tests via Python
python test_nutrition_api.py
```
A script in the project root can be used to test the FatSecret API directly:
```bash
# Test the FatSecret API
python test_fatsecret_api_oauth1.py
```
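As a starting point for such tests, a minimal sketch of an automated check against a running service, assuming `httpx` is installed and a valid token sits in the `TOKEN` environment variable (endpoint shapes follow the examples above):

```python
import os

import httpx

BASE = "http://localhost:8006/api/v1/nutrition"
HEADERS = {"Authorization": f"Bearer {os.environ['TOKEN']}"}


def test_water_roundtrip():
    """Add a water entry, then confirm it appears in the same day's list."""
    created = httpx.post(f"{BASE}/water", headers=HEADERS, json={
        "amount_ml": 250, "entry_date": "2025-10-16", "notes": "test glass",
    }).json()
    entries = httpx.get(f"{BASE}/water", params={"date": "2025-10-16"},
                        headers=HEADERS).json()
    assert any(e["id"] == created["id"] for e in entries)
```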


@@ -0,0 +1,188 @@
# Nutrition Service API Documentation
## Overview
The Nutrition Service provides an API for tracking nutrition, counting calories, and retrieving food information through the FatSecret API integration. The service lets users monitor their diet and track water intake.
**Base URL:** `/api/v1/nutrition`
## Authentication
All endpoints require JWT authentication.
**Headers:**
```
Authorization: Bearer <jwt_token>
```
## API Endpoints
### 🔍 Food Search
#### Search Foods by Name
```http
GET /api/v1/nutrition/foods?query=яблоко
Authorization: Bearer <token>
```
**Parameters:**
- `query` (string, required): Search query for finding foods
- `page` (number, optional): Results page number, default 1
- `page_size` (number, optional): Number of results per page, default 20
**Response:**
```json
{
"results": [
{
"food_id": "123456",
"name": "Яблоко, сырое, с кожурой",
"brand": "",
"calories": 52,
"serving_size": "100г",
"nutrients": {
"carbohydrates": 13.8,
"protein": 0.3,
"fat": 0.2,
"fiber": 2.4
}
}
],
"total": 25,
"page": 1,
"page_size": 20
}
```
### 📝 Nutrition Entries
#### Add a Nutrition Entry
```http
POST /api/v1/nutrition/entries
Authorization: Bearer <token>
```
**Body:**
```json
{
"food_id": "123456",
"date": "2025-10-16",
"meal_type": "lunch",
"quantity": 1.0,
"serving_size": "100г",
"notes": "Красное яблоко"
}
```
**Supported meal types (`meal_type`):**
- `breakfast`
- `lunch`
- `dinner`
- `snack`
#### Get Nutrition Entries
```http
GET /api/v1/nutrition/entries?date=2025-10-16
Authorization: Bearer <token>
```
**Parameters:**
- `date` (string, optional): Date in YYYY-MM-DD format
- `start_date` (string, optional): Start date for fetching entries over a period
- `end_date` (string, optional): End date for fetching entries over a period
- `meal_type` (string, optional): Filter by meal type
#### Delete a Nutrition Entry
```http
DELETE /api/v1/nutrition/entries/{entry_id}
Authorization: Bearer <token>
```
### 💧 Water Tracking
#### Add a Water Intake Entry
```http
POST /api/v1/nutrition/water
Authorization: Bearer <token>
```
**Body:**
```json
{
"date": "2025-10-16",
"amount_ml": 250,
"time": "12:30:00"
}
```
#### Get Water Intake Entries
```http
GET /api/v1/nutrition/water?date=2025-10-16
Authorization: Bearer <token>
```
### 📊 Summaries and Statistics
#### Get a Daily Nutrition Summary
```http
GET /api/v1/nutrition/daily-summary?date=2025-10-16
Authorization: Bearer <token>
```
**Response:**
```json
{
"date": "2025-10-16",
"total_calories": 1578,
"total_carbohydrates": 175.3,
"total_proteins": 78.2,
"total_fats": 52.8,
"total_water": 1200,
"entries": [
{
"id": 123,
"food_name": "Яблоко, сырое, с кожурой",
"meal_type": "lunch",
"calories": 52,
"quantity": 1.0,
"serving_size": "100г"
}
]
}
```
#### Get Weekly Analytics
```http
GET /api/v1/nutrition/weekly-summary?start_date=2025-10-10
Authorization: Bearer <token>
```
## FatSecret API Integration
The service uses the FatSecret API to obtain nutritional information about foods. The API keys are stored in the server configuration; no additional client-side setup is required.
## Usage Examples
### JavaScript
```javascript
// Example: searching for foods
async function searchFoods(query) {
const response = await fetch(`http://localhost:8000/api/v1/nutrition/foods?query=${query}`, {
headers: { 'Authorization': `Bearer ${token}` }
});
return response.json();
}
// Example: adding a nutrition entry
async function addNutritionEntry(entryData) {
const response = await fetch('http://localhost:8000/api/v1/nutrition/entries', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${token}`
},
body: JSON.stringify(entryData)
});
return response.json();
}
```
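For Python clients, a roughly equivalent sketch using `httpx` (mirrors the JavaScript above; `token` is assumed to hold a valid JWT):

```python
import httpx

GATEWAY = "http://localhost:8000/api/v1/nutrition"


def search_foods(token: str, query: str) -> dict:
    """Search foods through the API gateway."""
    resp = httpx.get(f"{GATEWAY}/foods", params={"query": query},
                     headers={"Authorization": f"Bearer {token}"})
    resp.raise_for_status()
    return resp.json()


def add_nutrition_entry(token: str, entry_data: dict) -> dict:
    """Create a nutrition diary entry through the API gateway."""
    resp = httpx.post(f"{GATEWAY}/entries", json=entry_data,
                      headers={"Authorization": f"Bearer {token}"})
    resp.raise_for_status()
    return resp.json()
```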


@@ -20,8 +20,13 @@ women-safety-backend/
│ ├── 📁 calendar_service/
│ │ ├── main.py # Calendar Service (8004)
│ │ └── models.py # Calendar models
│ ├── 📁 notification_service/
│ │ └── main.py # Notification Service (8005)
│ └── 📁 nutrition_service/
│ ├── main.py # Nutrition Service (8006)
│ ├── models.py # Nutrition models
│ ├── schemas.py # Nutrition schemas
│ └── fatsecret_client.py # FatSecret API client
├── 📁 shared/ # Shared components
│ ├── config.py # Application configuration

integrate_nutrition_service.sh Executable file

@@ -0,0 +1,13 @@
#!/bin/bash
# Integrates the nutrition service into docker-compose.prod.yml
echo "Integrating the nutrition service into docker-compose.prod.yml..."
# Find the insertion point for the nutrition service (after the last service definition)
LAST_SERVICE=$(grep -n "^ [a-zA-Z].*:" docker-compose.prod.yml | tail -1 | cut -d':' -f1)
# Insert the nutrition service definition after the last service and before volumes
sed -i "${LAST_SERVICE}r nutrition-service-prod.yml" docker-compose.prod.yml
echo "Done! The nutrition service has been added to docker-compose.prod.yml"
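Because the script splices YAML by line number, the insertion point is easy to get wrong; a quick sanity-check sketch that the result still parses and that the new services landed under `services:` (assumes PyYAML is installed):

```python
import yaml

# Fail loudly if the spliced compose file is broken or mis-placed.
with open("docker-compose.prod.yml") as f:
    doc = yaml.safe_load(f)
assert "nutrition-service-1" in doc.get("services", {}), \
    "nutrition-service-1 is not under 'services:'"
```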


@@ -0,0 +1,44 @@
# Nutrition Service Cluster
nutrition-service-1:
image: women-safety/nutrition-service:${TAG:-latest}
environment:
- NODE_ID=1
- DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres-primary:5432/women_safety_prod
- DATABASE_REPLICA_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres-replica:5432/women_safety_prod
- REDIS_URL=redis://redis-cluster:6379/5
- FATSECRET_CLIENT_ID=${FATSECRET_CLIENT_ID}
- FATSECRET_CLIENT_SECRET=${FATSECRET_CLIENT_SECRET}
depends_on:
- postgres-primary
- redis-cluster
restart: always
deploy:
resources:
limits:
cpus: '1.0'
memory: 2G
reservations:
cpus: '0.5'
memory: 512M
nutrition-service-2:
image: women-safety/nutrition-service:${TAG:-latest}
environment:
- NODE_ID=2
- DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres-primary:5432/women_safety_prod
- DATABASE_REPLICA_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres-replica:5432/women_safety_prod
- REDIS_URL=redis://redis-cluster:6379/5
- FATSECRET_CLIENT_ID=${FATSECRET_CLIENT_ID}
- FATSECRET_CLIENT_SECRET=${FATSECRET_CLIENT_SECRET}
depends_on:
- postgres-primary
- redis-cluster
restart: always
deploy:
resources:
limits:
cpus: '1.0'
memory: 2G
reservations:
cpus: '0.5'
memory: 512M


@@ -59,6 +59,7 @@ SERVICES = {
"location": os.getenv("LOCATION_SERVICE_URL", "http://localhost:8003"),
"calendar": os.getenv("CALENDAR_SERVICE_URL", "http://localhost:8004"),
"notifications": os.getenv("NOTIFICATION_SERVICE_URL", "http://localhost:8005"),
"nutrition": os.getenv("NUTRITION_SERVICE_URL", "http://localhost:8006"),
}
# Rate limiting (simple in-memory implementation)
@@ -732,6 +733,7 @@ async def root():
"location": "/api/v1/locations/update, /api/v1/locations/safe-places",
"calendar": "/api/v1/calendar/entries, /api/v1/calendar/cycle-overview",
"notifications": "/api/v1/notifications/devices, /api/v1/notifications/history",
"nutrition": "/api/v1/nutrition/foods, /api/v1/nutrition/daily-summary",
},
"docs": "/docs",
}
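In the production compose file each `*_SERVICE_URL` carries two comma-separated replicas, which implies client-side balancing in the gateway; a minimal sketch of one way to rotate across them (illustrative only, not necessarily the gateway's actual logic):

```python
from itertools import cycle

# One infinite round-robin iterator per service, built from the SERVICES map above.
_rotations = {name: cycle(urls.split(",")) for name, urls in SERVICES.items()}


def pick_backend(service: str) -> str:
    """Return the next replica URL for the given service."""
    return next(_rotations[service])

# pick_backend("nutrition") -> "http://nutrition-service-1:8006", then "...-2", ...
```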


@@ -0,0 +1,199 @@
import base64
import hashlib
import hmac
import json
import logging
import os
import random
import time
import urllib.parse
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Union
import httpx
from shared.config import settings
logger = logging.getLogger(__name__)
class FatSecretClient:
"""Клиент для работы с API FatSecret"""
BASE_URL = "https://platform.fatsecret.com/rest/server.api"
def __init__(self):
"""Инициализация клиента для работы с API FatSecret"""
# Используем CUSTOMER_KEY для OAuth 1.0, если он доступен, иначе CLIENT_ID
self.api_key = settings.FATSECRET_CUSTOMER_KEY or settings.FATSECRET_CLIENT_ID
self.api_secret = settings.FATSECRET_CLIENT_SECRET
# Log key info (without revealing the full key)
logger.info(f"FatSecretClient initialized with key: {self.api_key[:8]}...")
def _generate_oauth_params(self, http_method: str, url: str, params: Dict[str, Any]) -> Dict[str, Any]:
"""Создание и подписание OAuth 1.0 параметров"""
# Текущее время в секундах
timestamp = str(int(time.time()))
# Random string for the nonce
nonce = ''.join([str(random.randint(0, 9)) for _ in range(8)])
# Base set of OAuth parameters
oauth_params = {
'oauth_consumer_key': self.api_key,
'oauth_nonce': nonce,
'oauth_signature_method': 'HMAC-SHA1',
'oauth_timestamp': timestamp,
'oauth_version': '1.0'
}
# Merge with the request parameters
all_params = {**params, **oauth_params}
# Sort parameters by key
sorted_params = sorted(all_params.items())
# Build the parameter string for the signature
param_string = "&".join([
f"{urllib.parse.quote(str(k), safe='')}={urllib.parse.quote(str(v), safe='')}"
for k, v in sorted_params
])
# Build the signature base string
signature_base = f"{http_method}&{urllib.parse.quote(url, safe='')}&{urllib.parse.quote(param_string, safe='')}"
# Build the signing key
signing_key = f"{urllib.parse.quote(str(self.api_secret), safe='')}&"
# Create the HMAC-SHA1 signature
signature = base64.b64encode(
hmac.new(
signing_key.encode(),
signature_base.encode(),
hashlib.sha1
).digest()
).decode()
# Append the signature to the OAuth parameters
all_params['oauth_signature'] = signature
return all_params
async def search_foods(self, query: str, page_number: int = 0, max_results: int = 10) -> Dict[str, Any]:
"""Поиск продуктов по запросу"""
params = {
'method': 'foods.search',
'search_expression': query,
'page_number': str(page_number),
'max_results': str(max_results),
'format': 'json'
}
# Get the signed OAuth parameters
oauth_params = self._generate_oauth_params("GET", self.BASE_URL, params)
try:
async with httpx.AsyncClient() as client:
response = await client.get(
self.BASE_URL,
params=oauth_params
)
response.raise_for_status()
return response.json()
except Exception as e:
logger.error(f"Error searching foods: {e}")
raise
async def get_food_details(self, food_id: Union[str, int]) -> Dict[str, Any]:
"""Получить детальную информацию о продукте по ID"""
params = {
'method': 'food.get.v2',
'food_id': str(food_id),
'format': 'json'
}
# Get the signed OAuth parameters
oauth_params = self._generate_oauth_params("GET", self.BASE_URL, params)
try:
async with httpx.AsyncClient() as client:
response = await client.get(
self.BASE_URL,
params=oauth_params
)
response.raise_for_status()
return response.json()
except Exception as e:
logger.error(f"Error getting food details: {e}")
raise
async def parse_food_data(self, food_json: Dict[str, Any]) -> Dict[str, Any]:
"""Разбирает данные о продукте из API в более удобный формат"""
try:
food = food_json.get('food', {})
# Extract basic food information
food_id = food.get('food_id')
food_name = food.get('food_name', '')
food_type = food.get('food_type', '')
brand_name = food.get('brand_name', '')
# Process serving information
servings = food.get('servings', {}).get('serving', [])
# If there is only one serving, wrap it in a list
if isinstance(servings, dict):
servings = [servings]
# Prefer the default serving (usually 100g or the standard serving)
serving_data = {}
for serving in servings:
if serving.get('is_default_serving', 0) == "1" or serving.get('serving_description', '').lower() == '100g':
serving_data = serving
break
# If no default serving was found, take the first one
if not serving_data and servings:
serving_data = servings[0]
# Extract serving details
serving_description = serving_data.get('serving_description', '')
serving_amount = serving_data.get('metric_serving_amount', serving_data.get('serving_amount', ''))
serving_unit = serving_data.get('metric_serving_unit', serving_data.get('serving_unit', ''))
# Build a human-readable serving-size string
serving_size = f"{serving_amount} {serving_unit}" if serving_amount and serving_unit else serving_description
# Extract nutrition values
calories = float(serving_data.get('calories', 0) or 0)
protein = float(serving_data.get('protein', 0) or 0)
fat = float(serving_data.get('fat', 0) or 0)
carbs = float(serving_data.get('carbohydrate', 0) or 0)
fiber = float(serving_data.get('fiber', 0) or 0)
sugar = float(serving_data.get('sugar', 0) or 0)
sodium = float(serving_data.get('sodium', 0) or 0)
cholesterol = float(serving_data.get('cholesterol', 0) or 0)
# Assemble the result
result = {
"fatsecret_id": food_id,
"name": food_name,
"brand": brand_name,
"food_type": food_type,
"serving_size": serving_size,
"serving_weight_grams": float(serving_amount) if serving_unit == 'g' else None,
"calories": calories,
"protein_grams": protein,
"fat_grams": fat,
"carbs_grams": carbs,
"fiber_grams": fiber,
"sugar_grams": sugar,
"sodium_mg": sodium,
"cholesterol_mg": cholesterol,
"is_verified": True
}
return result
except Exception as e:
logger.error(f"Error parsing food data: {e}")
raise
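A short usage sketch for this client (async, so it is driven with `asyncio`; assumes the FatSecret keys are configured in `shared/config.py`):

```python
import asyncio


async def main():
    client = FatSecretClient()
    results = await client.search_foods("apple", max_results=3)
    # A single hit comes back as a dict rather than a list (handled in main.py).
    first_id = results["foods"]["food"][0]["food_id"]
    details = await client.get_food_details(first_id)
    parsed = await client.parse_food_data(details)
    print(parsed["name"], parsed["calories"])


asyncio.run(main())
```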


@@ -0,0 +1,462 @@
from datetime import date, datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import Depends, FastAPI, HTTPException, Query, Path, status
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy import and_, desc, select, func, text
from sqlalchemy.ext.asyncio import AsyncSession
from services.nutrition_service.models import (
FoodItem, UserNutritionEntry, WaterIntake,
UserActivityEntry, NutritionGoal
)
from services.nutrition_service.schemas import (
FoodItemCreate, FoodItemResponse, UserNutritionEntryCreate,
UserNutritionEntryResponse, WaterIntakeCreate, WaterIntakeResponse,
UserActivityEntryCreate, UserActivityEntryResponse,
NutritionGoalCreate, NutritionGoalResponse,
FoodSearchQuery, FoodDetailsQuery, DailyNutritionSummary
)
from services.nutrition_service.fatsecret_client import FatSecretClient
from shared.auth import get_current_user_from_token
from shared.config import settings
from shared.database import get_db
app = FastAPI(title="Nutrition Service", version="1.0.0")
# CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=settings.CORS_ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Create the FatSecret client
fatsecret_client = FatSecretClient()
@app.get("/health")
async def health_check():
"""Health check endpoint"""
return {"status": "healthy", "service": "nutrition_service"}
# Endpoints backed by the FatSecret API
@app.post("/api/v1/nutrition/search", response_model=List[FoodItemResponse])
async def search_foods(
search_query: FoodSearchQuery,
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Поиск продуктов питания по запросу в FatSecret API"""
try:
# Call the FatSecret API to search for foods
search_results = await fatsecret_client.search_foods(
search_query.query,
search_query.page_number,
search_query.max_results
)
# Process the search results
foods = []
if 'foods' in search_results and 'food' in search_results['foods']:
food_list = search_results['foods']['food']
# If there is exactly one result, the API returns a dict instead of a list
if isinstance(food_list, dict):
food_list = [food_list]
for food in food_list:
# Fetch the food details
food_details = await fatsecret_client.get_food_details(food['food_id'])
parsed_food = await fatsecret_client.parse_food_data(food_details)
# Check whether the food already exists in the database
query = select(FoodItem).where(FoodItem.fatsecret_id == parsed_food['fatsecret_id'])
result = await db.execute(query)
db_food = result.scalars().first()
# If the food does not exist yet, save it
if not db_food:
db_food = FoodItem(**parsed_food)
db.add(db_food)
await db.commit()
await db.refresh(db_food)
foods.append(FoodItemResponse.model_validate(db_food))
return foods
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error searching foods: {str(e)}"
)
@app.get("/api/v1/nutrition/food/{food_id}", response_model=FoodItemResponse)
async def get_food_details(
food_id: int = Path(..., description="Food item ID in the database"),
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Получение детальной информации о продукте по ID из базы данных"""
query = select(FoodItem).where(FoodItem.id == food_id)
result = await db.execute(query)
food = result.scalars().first()
if not food:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Food item not found"
)
return FoodItemResponse.model_validate(food)
@app.get("/api/v1/nutrition/fatsecret/{fatsecret_id}", response_model=FoodItemResponse)
async def get_food_by_fatsecret_id(
fatsecret_id: str = Path(..., description="Food ID in FatSecret"),
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Get detailed information about a food item by its FatSecret ID"""
# Check whether the food is already in our database
query = select(FoodItem).where(FoodItem.fatsecret_id == fatsecret_id)
result = await db.execute(query)
food = result.scalars().first()
# If the food is not found locally, fetch it from the FatSecret API
if not food:
try:
food_details = await fatsecret_client.get_food_details(fatsecret_id)
parsed_food = await fatsecret_client.parse_food_data(food_details)
food = FoodItem(**parsed_food)
db.add(food)
await db.commit()
await db.refresh(food)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error fetching food details: {str(e)}"
)
return FoodItemResponse.model_validate(food)
# Endpoints for user nutrition entries
@app.post("/api/v1/nutrition/entries", response_model=UserNutritionEntryResponse)
async def create_nutrition_entry(
entry_data: UserNutritionEntryCreate,
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Create a new nutrition entry for the user"""
# Get the user ID from the token
user_id = user_data["user_id"]
# If a food item ID is given, verify that it exists
food_item = None
if entry_data.food_item_id:
query = select(FoodItem).where(FoodItem.id == entry_data.food_item_id)
result = await db.execute(query)
food_item = result.scalars().first()
if not food_item:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Food item not found"
)
# Build the entry data
nutrition_data = entry_data.model_dump(exclude={"food_item_id"})
nutrition_entry = UserNutritionEntry(**nutrition_data, user_id=user_id)
if food_item:
nutrition_entry.food_item_id = food_item.id
# If nutrition values are not provided, derive them from the food item
if not entry_data.calories and food_item.calories:
nutrition_entry.calories = food_item.calories * entry_data.quantity
if not entry_data.protein_grams and food_item.protein_grams:
nutrition_entry.protein_grams = food_item.protein_grams * entry_data.quantity
if not entry_data.fat_grams and food_item.fat_grams:
nutrition_entry.fat_grams = food_item.fat_grams * entry_data.quantity
if not entry_data.carbs_grams and food_item.carbs_grams:
nutrition_entry.carbs_grams = food_item.carbs_grams * entry_data.quantity
db.add(nutrition_entry)
await db.commit()
await db.refresh(nutrition_entry)
return UserNutritionEntryResponse.model_validate(nutrition_entry)
@app.get("/api/v1/nutrition/entries", response_model=List[UserNutritionEntryResponse])
async def get_user_nutrition_entries(
start_date: date = Query(..., description="Start date of the range"),
end_date: date = Query(..., description="End date of the range"),
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Get the user's nutrition entries for the given period"""
user_id = user_data["user_id"]
query = (
select(UserNutritionEntry)
.where(
and_(
UserNutritionEntry.user_id == user_id,
UserNutritionEntry.entry_date >= start_date,
UserNutritionEntry.entry_date <= end_date
)
)
.order_by(UserNutritionEntry.entry_date, UserNutritionEntry.meal_type)
)
result = await db.execute(query)
entries = result.scalars().all()
return [UserNutritionEntryResponse.model_validate(entry) for entry in entries]
# Endpoints for water intake entries
@app.post("/api/v1/nutrition/water", response_model=WaterIntakeResponse)
async def create_water_intake(
intake_data: WaterIntakeCreate,
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Create a new water intake entry"""
user_id = user_data["user_id"]
water_intake = WaterIntake(**intake_data.model_dump(), user_id=user_id)
db.add(water_intake)
await db.commit()
await db.refresh(water_intake)
return WaterIntakeResponse.model_validate(water_intake)
@app.get("/api/v1/nutrition/water", response_model=List[WaterIntakeResponse])
async def get_user_water_intake(
start_date: date = Query(..., description="Start date of the range"),
end_date: date = Query(..., description="End date of the range"),
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Get water intake entries for the given period"""
user_id = user_data["user_id"]
query = (
select(WaterIntake)
.where(
and_(
WaterIntake.user_id == user_id,
WaterIntake.entry_date >= start_date,
WaterIntake.entry_date <= end_date
)
)
.order_by(WaterIntake.entry_date, WaterIntake.entry_time)
)
result = await db.execute(query)
entries = result.scalars().all()
return [WaterIntakeResponse.model_validate(entry) for entry in entries]
# Endpoints for physical activity entries
@app.post("/api/v1/nutrition/activity", response_model=UserActivityEntryResponse)
async def create_activity_entry(
activity_data: UserActivityEntryCreate,
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Create a new physical activity entry"""
user_id = user_data["user_id"]
# If calories burned are not provided, estimate them roughly
if not activity_data.calories_burned:
# Simple estimate based on activity type and duration
# An accurate calculation would need more inputs (weight, height, age, sex)
activity_intensity = {
"walking": 5, # kcal/min
"running": 10,
"cycling": 8,
"swimming": 9,
"yoga": 4,
"weight_training": 6,
"hiit": 12,
"pilates": 5,
}
activity_type = activity_data.activity_type.lower()
intensity = activity_intensity.get(activity_type, 5) # Default: 5 kcal/min
# Adjust the burn rate according to the reported intensity
if activity_data.intensity == "high":
intensity *= 1.5
elif activity_data.intensity == "low":
intensity *= 0.8
calories_burned = intensity * activity_data.duration_minutes
activity_data.calories_burned = round(calories_burned, 1)
activity_entry = UserActivityEntry(**activity_data.model_dump(), user_id=user_id)
db.add(activity_entry)
await db.commit()
await db.refresh(activity_entry)
return UserActivityEntryResponse.model_validate(activity_entry)
@app.get("/api/v1/nutrition/activity", response_model=List[UserActivityEntryResponse])
async def get_user_activities(
start_date: date = Query(..., description="Start date of the range"),
end_date: date = Query(..., description="End date of the range"),
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Get physical activity entries for the given period"""
user_id = user_data["user_id"]
query = (
select(UserActivityEntry)
.where(
and_(
UserActivityEntry.user_id == user_id,
UserActivityEntry.entry_date >= start_date,
UserActivityEntry.entry_date <= end_date
)
)
.order_by(UserActivityEntry.entry_date, UserActivityEntry.created_at)
)
result = await db.execute(query)
entries = result.scalars().all()
return [UserActivityEntryResponse.model_validate(entry) for entry in entries]
# Endpoints for nutrition goals
@app.post("/api/v1/nutrition/goals", response_model=NutritionGoalResponse)
async def create_or_update_nutrition_goals(
goal_data: NutritionGoalCreate,
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Create or update the user's nutrition and activity goals"""
user_id = user_data["user_id"]
# Check whether goals already exist for this user
query = select(NutritionGoal).where(NutritionGoal.user_id == user_id)
result = await db.execute(query)
existing_goal = result.scalars().first()
if existing_goal:
# Update the existing goal
for key, value in goal_data.model_dump(exclude_unset=True).items():
setattr(existing_goal, key, value)
await db.commit()
await db.refresh(existing_goal)
return NutritionGoalResponse.model_validate(existing_goal)
else:
# Create a new goal
new_goal = NutritionGoal(**goal_data.model_dump(), user_id=user_id)
db.add(new_goal)
await db.commit()
await db.refresh(new_goal)
return NutritionGoalResponse.model_validate(new_goal)
@app.get("/api/v1/nutrition/goals", response_model=NutritionGoalResponse)
async def get_nutrition_goals(
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Получение целей пользователя по питанию и активности"""
user_id = user_data["user_id"]
query = select(NutritionGoal).where(NutritionGoal.user_id == user_id)
result = await db.execute(query)
goal = result.scalars().first()
if not goal:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Nutrition goals not found for this user"
)
return NutritionGoalResponse.model_validate(goal)
# Summary reports
@app.get("/api/v1/nutrition/summary/daily", response_model=DailyNutritionSummary)
async def get_daily_nutrition_summary(
target_date: date = Query(..., description="Date to summarize"),
user_data: dict = Depends(get_current_user_from_token),
db: AsyncSession = Depends(get_db)
):
"""Get a daily summary of nutrition, water intake, and physical activity"""
user_id = user_data["user_id"]
# Query nutrition entries
meals_query = select(UserNutritionEntry).where(
and_(
UserNutritionEntry.user_id == user_id,
UserNutritionEntry.entry_date == target_date
)
).order_by(UserNutritionEntry.meal_type)
meals_result = await db.execute(meals_query)
meals = meals_result.scalars().all()
# Query water intake entries
water_query = select(WaterIntake).where(
and_(
WaterIntake.user_id == user_id,
WaterIntake.entry_date == target_date
)
).order_by(WaterIntake.entry_time)
water_result = await db.execute(water_query)
water_entries = water_result.scalars().all()
# Query activity entries
activity_query = select(UserActivityEntry).where(
and_(
UserActivityEntry.user_id == user_id,
UserActivityEntry.entry_date == target_date
)
).order_by(UserActivityEntry.created_at)
activity_result = await db.execute(activity_query)
activity_entries = activity_result.scalars().all()
# Compute the totals
total_calories = sum(meal.calories or 0 for meal in meals)
total_protein = sum(meal.protein_grams or 0 for meal in meals)
total_fat = sum(meal.fat_grams or 0 for meal in meals)
total_carbs = sum(meal.carbs_grams or 0 for meal in meals)
total_water = sum(water.amount_ml for water in water_entries)
total_activity = sum(activity.duration_minutes for activity in activity_entries)
calories_burned = sum(activity.calories_burned or 0 for activity in activity_entries)
# Build the response
summary = DailyNutritionSummary(
date=target_date,
total_calories=total_calories,
total_protein_grams=total_protein,
total_fat_grams=total_fat,
total_carbs_grams=total_carbs,
total_water_ml=total_water,
total_activity_minutes=total_activity,
estimated_calories_burned=calories_burned,
meals=[UserNutritionEntryResponse.model_validate(meal) for meal in meals],
water_entries=[WaterIntakeResponse.model_validate(water) for water in water_entries],
activity_entries=[UserActivityEntryResponse.model_validate(activity) for activity in activity_entries]
)
return summary
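For a quick smoke test of the endpoints above, a minimal httpx sketch (assuming the service runs on localhost:8006 and TOKEN holds a valid JWT issued by the auth service; both are assumptions, not part of this file):

import asyncio
import httpx

TOKEN = "..."  # hypothetical JWT from the auth service

async def demo():
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with httpx.AsyncClient(base_url="http://localhost:8006") as client:
        # Search foods; results are cached into the local food_items table
        resp = await client.post(
            "/api/v1/nutrition/search",
            json={"query": "apple", "max_results": 5},
            headers=headers,
        )
        print(resp.json())
        # Daily summary for a given date
        resp = await client.get(
            "/api/v1/nutrition/summary/daily",
            params={"target_date": "2025-10-16"},
            headers=headers,
        )
        print(resp.json())

asyncio.run(demo())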

View File

@@ -0,0 +1,146 @@
import uuid
from sqlalchemy import Boolean, Column, Date, Float, Integer, String, Text, ForeignKey
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func
from sqlalchemy.sql.expression import text
from sqlalchemy.sql.sqltypes import TIMESTAMP
from shared.database import BaseModel
class FoodItem(BaseModel):
"""Модель для хранения информации о продуктах питания"""
__tablename__ = "food_items"
uuid = Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, index=True)
# Основная информация о продукте
fatsecret_id = Column(String(50), unique=True, index=True, nullable=True) # ID продукта в FatSecret
name = Column(String(255), nullable=False)
brand = Column(String(255), nullable=True)
description = Column(Text, nullable=True)
food_type = Column(String(50), nullable=True) # generic, branded, etc.
serving_size = Column(String(100), nullable=True) # e.g. "1 cup" or "100g"
serving_weight_grams = Column(Float, nullable=True)
# Nutrition facts per serving
calories = Column(Float, nullable=True) # kcal
protein_grams = Column(Float, nullable=True)
fat_grams = Column(Float, nullable=True)
carbs_grams = Column(Float, nullable=True)
fiber_grams = Column(Float, nullable=True)
sugar_grams = Column(Float, nullable=True)
sodium_mg = Column(Float, nullable=True)
cholesterol_mg = Column(Float, nullable=True)
# Additional information
ingredients = Column(Text, nullable=True)
is_verified = Column(Boolean, default=False) # Verified vs. user-submitted data
created_at = Column(TIMESTAMP(timezone=True), nullable=False, server_default=func.now())
updated_at = Column(TIMESTAMP(timezone=True), onupdate=func.now())
def __repr__(self):
return f"<FoodItem {self.name}>"
class UserNutritionEntry(BaseModel):
"""Модель для хранения записей пользователя о потреблении пищи"""
__tablename__ = "user_nutrition_entries"
uuid = Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, index=True)
user_id = Column(Integer, nullable=False, index=True) # Связь с таблицей пользователей
# Информация о приеме пищи
entry_date = Column(Date, nullable=False, index=True)
meal_type = Column(String(50), nullable=False) # breakfast, lunch, dinner, snack
food_item_id = Column(Integer, ForeignKey("food_items.id"), nullable=True)
custom_food_name = Column(String(255), nullable=True) # Если продукт не из базы
# Количество
quantity = Column(Float, nullable=False, default=1.0)
unit = Column(String(50), nullable=True) # g, ml, oz, piece, etc.
# Nutrition values computed for this quantity
calories = Column(Float, nullable=True)
protein_grams = Column(Float, nullable=True)
fat_grams = Column(Float, nullable=True)
carbs_grams = Column(Float, nullable=True)
# Metadata
notes = Column(Text, nullable=True)
created_at = Column(TIMESTAMP(timezone=True), nullable=False, server_default=func.now())
updated_at = Column(TIMESTAMP(timezone=True), onupdate=func.now())
def __repr__(self):
return f"<UserNutritionEntry user_id={self.user_id} date={self.entry_date} meal={self.meal_type}>"
class WaterIntake(BaseModel):
"""Модель для отслеживания потребления воды"""
__tablename__ = "water_intake"
uuid = Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, index=True)
user_id = Column(Integer, nullable=False, index=True) # Связь с таблицей пользователей
entry_date = Column(Date, nullable=False, index=True)
amount_ml = Column(Integer, nullable=False) # Количество в миллилитрах
entry_time = Column(TIMESTAMP(timezone=True), nullable=False, server_default=func.now())
notes = Column(Text, nullable=True)
def __repr__(self):
return f"<WaterIntake user_id={self.user_id} date={self.entry_date} amount={self.amount_ml}ml>"
class UserActivityEntry(BaseModel):
"""Модель для отслеживания физической активности"""
__tablename__ = "user_activity_entries"
uuid = Column(UUID(as_uuid=True), default=uuid.uuid4, unique=True, index=True)
user_id = Column(Integer, nullable=False, index=True) # Связь с таблицей пользователей
entry_date = Column(Date, nullable=False, index=True)
activity_type = Column(String(100), nullable=False) # walking, running, yoga, etc.
duration_minutes = Column(Integer, nullable=False)
calories_burned = Column(Float, nullable=True) # Расчетное количество сожженных калорий
# Дополнительные параметры активности
distance_km = Column(Float, nullable=True) # Для активностей с расстоянием
steps = Column(Integer, nullable=True) # Для ходьбы
intensity = Column(String(20), nullable=True) # low, medium, high
notes = Column(Text, nullable=True)
created_at = Column(TIMESTAMP(timezone=True), nullable=False, server_default=func.now())
def __repr__(self):
return f"<UserActivityEntry user_id={self.user_id} date={self.entry_date} activity={self.activity_type}>"
class NutritionGoal(BaseModel):
"""Модель для хранения целей пользователя по питанию и активности"""
__tablename__ = "nutrition_goals"
user_id = Column(Integer, nullable=False, index=True, unique=True) # Связь с таблицей пользователей
# Цели по калориям и макронутриентам
daily_calorie_goal = Column(Integer, nullable=True)
protein_goal_grams = Column(Integer, nullable=True)
fat_goal_grams = Column(Integer, nullable=True)
carbs_goal_grams = Column(Integer, nullable=True)
# Water and activity goals
water_goal_ml = Column(Integer, nullable=True, default=2000) # Default: 2 liters
activity_goal_minutes = Column(Integer, nullable=True, default=30) # At least 30 minutes of activity
# Weight goal and preferences
weight_goal_kg = Column(Float, nullable=True)
goal_type = Column(String(50), nullable=True) # lose_weight, maintain, gain_weight, health
created_at = Column(TIMESTAMP(timezone=True), nullable=False, server_default=func.now())
updated_at = Column(TIMESTAMP(timezone=True), onupdate=func.now())
def __repr__(self):
return f"<NutritionGoal user_id={self.user_id} calories={self.daily_calorie_goal}>"

View File

@@ -0,0 +1,203 @@
from datetime import date, datetime
from enum import Enum
from typing import List, Optional
from uuid import UUID
from pydantic import BaseModel, Field, root_validator
class MealType(str, Enum):
BREAKFAST = "breakfast"
LUNCH = "lunch"
DINNER = "dinner"
SNACK = "snack"
class ActivityIntensity(str, Enum):
LOW = "low"
MEDIUM = "medium"
HIGH = "high"
class GoalType(str, Enum):
LOSE_WEIGHT = "lose_weight"
MAINTAIN = "maintain"
GAIN_WEIGHT = "gain_weight"
HEALTH = "health"
# Schemas for FoodItem
class FoodItemBase(BaseModel):
name: str
brand: Optional[str] = None
description: Optional[str] = None
food_type: Optional[str] = None
serving_size: Optional[str] = None
serving_weight_grams: Optional[float] = None
calories: Optional[float] = None
protein_grams: Optional[float] = None
fat_grams: Optional[float] = None
carbs_grams: Optional[float] = None
fiber_grams: Optional[float] = None
sugar_grams: Optional[float] = None
sodium_mg: Optional[float] = None
cholesterol_mg: Optional[float] = None
ingredients: Optional[str] = None
class FoodItemCreate(FoodItemBase):
fatsecret_id: Optional[str] = None
is_verified: bool = False
class FoodItemResponse(FoodItemBase):
id: int
uuid: UUID
fatsecret_id: Optional[str] = None
is_verified: bool
created_at: datetime
updated_at: Optional[datetime] = None
class Config:
from_attributes = True
# Schemas for UserNutritionEntry
class UserNutritionEntryBase(BaseModel):
entry_date: date
meal_type: MealType
quantity: float = Field(gt=0)
unit: Optional[str] = None
notes: Optional[str] = None
class UserNutritionEntryCreate(UserNutritionEntryBase):
food_item_id: Optional[int] = None
custom_food_name: Optional[str] = None
calories: Optional[float] = None
protein_grams: Optional[float] = None
fat_grams: Optional[float] = None
carbs_grams: Optional[float] = None
@root_validator(skip_on_failure=True)
def check_food_info(cls, values):
food_item_id = values.get("food_item_id")
custom_food_name = values.get("custom_food_name")
if food_item_id is None and not custom_food_name:
raise ValueError("Either food_item_id or custom_food_name must be provided")
return values
class UserNutritionEntryResponse(UserNutritionEntryBase):
id: int
uuid: UUID
user_id: int
food_item_id: Optional[int] = None
custom_food_name: Optional[str] = None
calories: Optional[float] = None
protein_grams: Optional[float] = None
fat_grams: Optional[float] = None
carbs_grams: Optional[float] = None
created_at: datetime
class Config:
from_attributes = True
# Schemas for WaterIntake
class WaterIntakeBase(BaseModel):
entry_date: date
amount_ml: int = Field(gt=0)
notes: Optional[str] = None
class WaterIntakeCreate(WaterIntakeBase):
pass
class WaterIntakeResponse(WaterIntakeBase):
id: int
uuid: UUID
user_id: int
entry_time: datetime
class Config:
from_attributes = True
# Schemas for UserActivityEntry
class UserActivityEntryBase(BaseModel):
entry_date: date
activity_type: str
duration_minutes: int = Field(gt=0)
distance_km: Optional[float] = None
steps: Optional[int] = None
intensity: Optional[ActivityIntensity] = None
notes: Optional[str] = None
class UserActivityEntryCreate(UserActivityEntryBase):
calories_burned: Optional[float] = None
class UserActivityEntryResponse(UserActivityEntryBase):
id: int
uuid: UUID
user_id: int
calories_burned: Optional[float] = None
created_at: datetime
class Config:
from_attributes = True
# Schemas for NutritionGoal
class NutritionGoalBase(BaseModel):
daily_calorie_goal: Optional[int] = None
protein_goal_grams: Optional[int] = None
fat_goal_grams: Optional[int] = None
carbs_goal_grams: Optional[int] = None
water_goal_ml: Optional[int] = None
activity_goal_minutes: Optional[int] = None
weight_goal_kg: Optional[float] = None
goal_type: Optional[GoalType] = None
class NutritionGoalCreate(NutritionGoalBase):
pass
class NutritionGoalResponse(NutritionGoalBase):
id: int
user_id: int
created_at: datetime
updated_at: Optional[datetime] = None
class Config:
from_attributes = True
# Schemas for FatSecret API requests
class FoodSearchQuery(BaseModel):
query: str
page_number: int = 0
max_results: int = 10
class FoodDetailsQuery(BaseModel):
food_id: str
# Schemas for summary data
class DailyNutritionSummary(BaseModel):
date: date
total_calories: float = 0
total_protein_grams: float = 0
total_fat_grams: float = 0
total_carbs_grams: float = 0
total_water_ml: int = 0
total_activity_minutes: int = 0
estimated_calories_burned: float = 0
meals: List[UserNutritionEntryResponse] = []
water_entries: List[WaterIntakeResponse] = []
activity_entries: List[UserActivityEntryResponse] = []
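The cross-field rule in UserNutritionEntryCreate is easy to miss; a short sketch of how it behaves, using only names defined above (the entry values are invented):

from datetime import date

# Valid: custom_food_name given, no food_item_id
ok = UserNutritionEntryCreate(
    entry_date=date(2025, 10, 16),
    meal_type=MealType.BREAKFAST,
    quantity=1.0,
    custom_food_name="Homemade soup",
)

# Invalid: neither food_item_id nor custom_food_name
try:
    UserNutritionEntryCreate(
        entry_date=date(2025, 10, 16),
        meal_type=MealType.SNACK,
        quantity=1.0,
    )
except ValueError as err:  # pydantic.ValidationError subclasses ValueError
    print(err)  # "Either food_item_id or custom_food_name must be provided"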

View File

@@ -85,11 +85,6 @@ async def register_user(user_data: UserCreate, db: AsyncSession = Depends(get_db
try:
hashed_password = get_password_hash(user_data.password)
except ValueError as e:
if "password cannot be longer than 72 bytes" in str(e):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Password is too long. Please use a shorter password (max 70 characters)."
)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Password validation error: {str(e)}"

View File

@@ -41,15 +41,15 @@ class UserBase(BaseModel):
class UserCreate(UserBase):
password: str = Field(..., min_length=8, max_length=70, description="Password (will be truncated to 72 bytes for bcrypt compatibility)")
password: str = Field(..., min_length=8, description="Password for user registration")
@field_validator("password")
@classmethod
def validate_password_bytes(cls, v):
"""Ensure password doesn't exceed bcrypt's 72-byte limit."""
password_bytes = v.encode('utf-8')
if len(password_bytes) > 72:
raise ValueError("Password is too long when encoded as UTF-8 (max 72 bytes for bcrypt)")
"""Basic validation for password."""
# Only enforce the minimum length
if not v or len(v.strip()) < 8:
raise ValueError("Password must be at least 8 characters")
return v
@@ -102,17 +102,15 @@ class UserResponse(UserBase):
class UserLogin(BaseModel):
email: Optional[EmailStr] = None
username: Optional[str] = None
password: str = Field(..., max_length=70, description="Password (will be truncated to 72 bytes for bcrypt compatibility)")
password: str = Field(..., min_length=1, description="Password for authentication")
@field_validator("password")
@classmethod
def validate_password_bytes(cls, v):
"""Ensure password doesn't exceed bcrypt's 72-byte limit."""
"""Basic password validation."""
if not v or len(v.strip()) == 0:
raise ValueError("Password cannot be empty")
password_bytes = v.encode('utf-8')
if len(password_bytes) > 72:
raise ValueError("Password is too long when encoded as UTF-8 (max 72 bytes for bcrypt)")
# No maximum-length check here - passlib/bcrypt enforces its own limits
return v
@field_validator("username")

View File

@@ -18,8 +18,13 @@ from shared.config import settings
# Suppress bcrypt version warnings
logging.getLogger("passlib").setLevel(logging.ERROR)
# Password hashing
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
# Password hashing - bcrypt configured with more robust parameters
pwd_context = CryptContext(
schemes=["bcrypt"],
deprecated="auto",
bcrypt__rounds=12, # Standard number of rounds
bcrypt__truncate_error=False # Do not raise on long passwords; silently truncate
)
# Bearer token scheme
security = HTTPBearer()
@@ -28,29 +33,32 @@ security = HTTPBearer()
def verify_password(plain_password: str, hashed_password: str) -> bool:
"""Verify a password against its hash. Handle bcrypt compatibility issues."""
try:
# Truncate password to 72 bytes for consistency
password_bytes = plain_password.encode('utf-8')
if len(password_bytes) > 72:
plain_password = password_bytes[:72].decode('utf-8', errors='ignore')
return pwd_context.verify(plain_password, hashed_password)
# More verbose logging
logging.info(f"Verifying password length: {len(plain_password)} chars")
# Verify the password via passlib and log the result
result = pwd_context.verify(plain_password, hashed_password)
logging.info(f"Password verification result: {result}")
return result
except Exception as e:
logging.error(f"Error verifying password: {e}")
logging.error(f"Error verifying password: {e}, hash_type: {hashed_password[:10]}...")
return False
def get_password_hash(password: str) -> str:
"""Get password hash. Truncate password to 72 bytes if necessary for bcrypt compatibility."""
"""Get password hash. Let passlib handle bcrypt compatibility."""
try:
# bcrypt has a 72-byte limit, so truncate if necessary
password_bytes = password.encode('utf-8')
if len(password_bytes) > 72:
logging.warning("Password exceeds bcrypt limit of 72 bytes. Truncating.")
password = password_bytes[:70].decode('utf-8', errors='ignore')
return pwd_context.hash(password)
# More verbose logging
logging.info(f"Hashing password length: {len(password)} chars")
# bcrypt automatically truncates passwords to 72 bytes
hashed = pwd_context.hash(password)
logging.info("Password hashed successfully")
return hashed
except Exception as e:
# Handle bcrypt compatibility issues
# Log the error and re-raise
logging.error(f"Error hashing password: {e}")
raise ValueError("Password hashing failed. Please use a shorter password.")
raise ValueError(f"Password hashing failed: {str(e)}")
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:

View File

@@ -41,6 +41,11 @@ class Settings(BaseSettings):
# External Services
FCM_SERVER_KEY: Optional[str] = None
# FatSecret API for nutrition data
FATSECRET_CLIENT_ID: str = "56342dd56fc74b26afb49d65b8f84c16"
FATSECRET_CLIENT_SECRET: str = "fae178f189dc44ddb368cabe9069c0e3"
FATSECRET_CUSTOMER_KEY: Optional[str] = None # Fixes a typo in the parameter name
# Security
CORS_ORIGINS: list = ["*"] # Change in production

View File

@@ -89,6 +89,10 @@ echo -e "${YELLOW}Starting Notification Service (port 8005)...${NC}"
python -m uvicorn services.notification_service.main:app --port 8005 &
NOTIFICATION_PID=$!
echo -e "${YELLOW}Starting Nutrition Service (port 8006)...${NC}"
python -m uvicorn services.nutrition_service.main:app --port 8006 &
NUTRITION_PID=$!
# Wait a bit for services to start
sleep 5
@@ -102,6 +106,7 @@ echo $EMERGENCY_PID > emergency_service.pid
echo $LOCATION_PID > location_service.pid
echo $CALENDAR_PID > calendar_service.pid
echo $NOTIFICATION_PID > notification_service.pid
echo $NUTRITION_PID > nutrition_service.pid
echo $GATEWAY_PID > api_gateway.pid
echo -e "${GREEN}🎉 All services started successfully!${NC}"
@@ -112,6 +117,7 @@ echo -e " 🚨 Emergency Service: http://localhost:8002"
echo -e " 📍 Location Service: http://localhost:8003"
echo -e " 📅 Calendar Service: http://localhost:8004"
echo -e " 🔔 Notification Service: http://localhost:8005"
echo -e " 🍎 Nutrition Service: http://localhost:8006"
echo -e "${GREEN}📖 API Documentation: http://localhost:8000/docs${NC}"
# Keep script running and show logs
@@ -127,6 +133,7 @@ cleanup() {
if [ -f "location_service.pid" ]; then kill "$(cat location_service.pid)" 2>/dev/null && rm location_service.pid; fi
if [ -f "calendar_service.pid" ]; then kill "$(cat calendar_service.pid)" 2>/dev/null && rm calendar_service.pid; fi
if [ -f "notification_service.pid" ]; then kill "$(cat notification_service.pid)" 2>/dev/null && rm notification_service.pid; fi
if [ -f "nutrition_service.pid" ]; then kill "$(cat nutrition_service.pid)" 2>/dev/null && rm nutrition_service.pid; fi
if [ -f "api_gateway.pid" ]; then kill "$(cat api_gateway.pid)" 2>/dev/null && rm api_gateway.pid; fi
echo -e "${GREEN}✅ All services stopped${NC}"

View File

@@ -51,6 +51,7 @@ cleanup() {
kill_port 8003
kill_port 8004
kill_port 8005
kill_port 8006
echo "✅ All services stopped"
exit 0
}
@@ -66,6 +67,7 @@ kill_port 8002
kill_port 8003
kill_port 8004
kill_port 8005
kill_port 8006
echo "⏳ Waiting for ports to be freed..."
sleep 3
@@ -94,6 +96,10 @@ echo "Starting Calendar Service (port 8004)..."
echo "Starting Notification Service (port 8005)..."
(cd services/notification_service && PYTHONPATH="${PWD}/../..:${PYTHONPATH}" python -m uvicorn main:app --host 0.0.0.0 --port 8005 --reload) &
# Start Nutrition Service
echo "Starting Nutrition Service (port 8006)..."
(cd services/nutrition_service && PYTHONPATH="${PWD}/../..:${PYTHONPATH}" python -m uvicorn main:app --host 0.0.0.0 --port 8006 --reload) &
# Start API Gateway
echo "Starting API Gateway (port 8000)..."
(cd services/api_gateway && PYTHONPATH="${PWD}/../..:${PYTHONPATH}" python -m uvicorn main:app --host 0.0.0.0 --port 8000 --reload) &
@@ -110,6 +116,7 @@ echo " 🚨 Emergency Service: http://localhost:8002"
echo " 📍 Location Service: http://localhost:8003"
echo " 📅 Calendar Service: http://localhost:8004"
echo " 🔔 Notification Service: http://localhost:8005"
echo " 🍎 Nutrition Service: http://localhost:8006"
echo ""
echo "📖 API Documentation: http://localhost:8000/docs"
echo "📊 Monitoring services... Press Ctrl+C to stop all services"

View File

@@ -42,8 +42,14 @@ if [ -f "notification_service.pid" ]; then
echo -e "${GREEN}✅ Notification Service stopped${NC}"
fi
if [ -f "nutrition_service.pid" ]; then
kill "$(cat nutrition_service.pid)" 2>/dev/null
rm nutrition_service.pid
echo -e "${GREEN}✅ Nutrition Service stopped${NC}"
fi
if [ -f "api_gateway.pid" ]; then
kill $(cat api_gateway.pid) 2>/dev/null
kill "$(cat api_gateway.pid)" 2>/dev/null
rm api_gateway.pid
echo -e "${GREEN}✅ API Gateway stopped${NC}"
fi

248
test_fatsecret_api.py Executable file
View File

@@ -0,0 +1,248 @@
#!/usr/bin/env python3
"""
Script for testing the FatSecret API
Runs test requests against the FatSecret API using keys from the application configuration
"""
import os
import json
import time
import base64
import asyncio
import httpx
import urllib.parse
import hmac
import hashlib
from datetime import datetime
from dotenv import load_dotenv
# Load the .env file
current_dir = os.path.dirname(os.path.abspath(__file__))
env_path = os.path.join(current_dir, ".env")
load_dotenv(env_path)
print(f"✅ Loaded .env from: {env_path}")
# Read the API keys from environment variables
FATSECRET_CLIENT_ID = os.environ.get("FATSECRET_CLIENT_ID")
FATSECRET_CLIENT_SECRET = os.environ.get("FATSECRET_CLIENT_SECRET")
FATSECRET_CUSTOMER_KEY = os.environ.get("FATSECRET_CUSTOMER_KEY")
if not FATSECRET_CLIENT_ID or not FATSECRET_CLIENT_SECRET:
raise ValueError("FatSecret API keys not found in .env file")
print(f"🔑 Using FatSecret API keys: CLIENT_ID={FATSECRET_CLIENT_ID[:8]}...")
if FATSECRET_CUSTOMER_KEY:
print(f"🔑 Using CUSTOMER_KEY={FATSECRET_CUSTOMER_KEY[:8]}...")
class FatSecretClient:
"""Клиент для работы с API FatSecret"""
BASE_URL = "https://platform.fatsecret.com/rest/server.api"
def __init__(self, client_id, client_secret):
self.client_id = client_id
self.client_secret = client_secret
self.access_token = None
self.token_expires = 0
async def get_access_token(self):
"""Получение OAuth 2.0 токена для доступа к API"""
now = time.time()
# Если у нас уже есть токен и он не истек, используем его
if self.access_token and self.token_expires > now + 60:
return self.access_token
print("🔄 Getting new access token...")
# Prepare the token request
auth_header = base64.b64encode(f"{self.client_id}:{self.client_secret}".encode()).decode()
print(f"🔑 Using client_id: {self.client_id}")
# Do not print the full secret; only the first few characters, for debugging
print(f"🔑 Using client_secret: {self.client_secret[:5]}...")
async with httpx.AsyncClient() as client:
response = await client.post(
"https://oauth.fatsecret.com/connect/token",
headers={
"Authorization": f"Basic {auth_header}",
"Content-Type": "application/x-www-form-urlencoded"
},
data={
"grant_type": "client_credentials",
"scope": "basic premier"
}
)
# Check that the request succeeded
if response.status_code != 200:
print(f"❌ Error getting token: {response.status_code}")
print(response.text)
raise Exception(f"Failed to get token: {response.status_code}")
token_data = response.json()
self.access_token = token_data["access_token"]
self.token_expires = now + token_data["expires_in"]
print(f"✅ Got token, expires in {token_data['expires_in']} seconds")
return self.access_token
async def search_food(self, query, page=0, max_results=10):
"""Поиск продуктов по названию"""
token = await self.get_access_token()
async with httpx.AsyncClient() as client:
response = await client.post(
self.BASE_URL,
headers={
"Authorization": f"Bearer {token}",
"Content-Type": "application/json"
},
json={
"method": "foods.search",
"search_expression": query,
"page_number": page,
"max_results": max_results,
"format": "json"
}
)
if response.status_code != 200:
print(f"❌ Error searching food: {response.status_code}")
print(response.text)
raise Exception(f"Failed to search food: {response.status_code}")
return response.json()
async def get_food(self, food_id):
"""Получение детальной информации о продукте по ID"""
token = await self.get_access_token()
async with httpx.AsyncClient() as client:
response = await client.post(
self.BASE_URL,
headers={
"Authorization": f"Bearer {token}",
"Content-Type": "application/json"
},
json={
"method": "food.get",
"food_id": food_id,
"format": "json"
}
)
if response.status_code != 200:
print(f"❌ Error getting food details: {response.status_code}")
print(response.text)
raise Exception(f"Failed to get food details: {response.status_code}")
return response.json()
async def run_tests():
"""Выполнение тестовых запросов к API FatSecret"""
client = FatSecretClient(FATSECRET_CLIENT_ID, FATSECRET_CLIENT_SECRET)
# Тест 1: Поиск продуктов
print("\n🔍 Testing food search...")
search_queries = ["apple", "bread", "chicken breast", "молоко"]
for query in search_queries:
print(f"\n📋 Searching for: {query}")
try:
result = await client.search_food(query)
# Check the response structure
if "foods" not in result:
print(f"❌ Unexpected response format: {result}")
continue
# No results for this query
if "food" not in result["foods"]:
print(f"⚠️ No results found for '{query}'")
continue
food_list = result["foods"]["food"]
if not isinstance(food_list, list):
food_list = [food_list] # Wrap a single result in a list
print(f"✅ Found {len(food_list)} results")
# Print the first 3 results
first_food_id = None
for i, food in enumerate(food_list[:3]):
food_name = food.get("food_name", "Unknown")
food_id = food.get("food_id", "Unknown")
food_desc = food.get("food_description", "No description")
print(f" {i+1}. [{food_id}] {food_name}")
print(f" {food_desc}")
# Save the first food's ID for the next test
if i == 0:
first_food_id = food_id
except Exception as e:
print(f"❌ Error during search: {e}")
# Test 2: fetching food details
found_food_id = None
for query in search_queries:
try:
result = await client.search_food(query)
if "foods" in result and "food" in result["foods"]:
food_list = result["foods"]["food"]
if not isinstance(food_list, list):
food_list = [food_list]
if food_list:
found_food_id = food_list[0].get("food_id")
break
except Exception:
continue
if found_food_id:
print(f"\n🔍 Testing food details for ID: {found_food_id}")
try:
result = await client.get_food(found_food_id)
if "food" not in result:
print(f"❌ Unexpected response format: {result}")
else:
food = result["food"]
food_name = food.get("food_name", "Unknown")
brand = food.get("brand_name", "Generic")
print(f"✅ Got details for: {food_name} [{brand}]")
# Print nutrition information
if "servings" in food:
servings = food["servings"]
if "serving" in servings:
serving_data = servings["serving"]
if not isinstance(serving_data, list):
serving_data = [serving_data]
print("\n📊 Nutrition info per serving:")
for i, serving in enumerate(serving_data[:2]): # Show up to 2 serving types
serving_desc = serving.get("serving_description", "Standard")
calories = serving.get("calories", "N/A")
protein = serving.get("protein", "N/A")
carbs = serving.get("carbohydrate", "N/A")
fat = serving.get("fat", "N/A")
print(f" Serving {i+1}: {serving_desc}")
print(f" Calories: {calories}")
print(f" Protein: {protein}g")
print(f" Carbohydrates: {carbs}g")
print(f" Fat: {fat}g")
except Exception as e:
print(f"❌ Error getting food details: {e}")
if __name__ == "__main__":
print("🚀 Starting FatSecret API test...")
asyncio.run(run_tests())
print("\n✅ Test completed!")

173
test_fatsecret_api_oauth1.py Executable file
View File

@@ -0,0 +1,173 @@
#!/usr/bin/env python3
"""
Script for testing the FatSecret API using OAuth 1.0
"""
import os
import time
import hmac
import base64
import random
import hashlib
import urllib.parse
import requests
from dotenv import load_dotenv
# Load the .env file
current_dir = os.path.dirname(os.path.abspath(__file__))
env_path = os.path.join(current_dir, ".env")
load_dotenv(env_path)
print(f"✅ Loaded .env from: {env_path}")
# Read the API keys from environment variables
FATSECRET_KEY = os.environ.get("FATSECRET_CUSTOMER_KEY") or os.environ.get("FATSECRET_CLIENT_ID")
FATSECRET_SECRET = os.environ.get("FATSECRET_CLIENT_SECRET")
if not FATSECRET_KEY or not FATSECRET_SECRET:
raise ValueError("FatSecret API keys not found in .env file")
print(f"🔑 Using FatSecret API keys: KEY={FATSECRET_KEY[:8]}...")
print(f"🔑 Using FatSecret SECRET (first few chars): {FATSECRET_SECRET[:5]}...")
def generate_oauth_params(http_method, url, params):
"""Создание и подписание OAuth 1.0 параметров"""
# Текущее время в секундах
timestamp = str(int(time.time()))
# Случайная строка для nonce
nonce = ''.join([str(random.randint(0, 9)) for _ in range(8)])
# Базовый набор параметров OAuth
oauth_params = {
'oauth_consumer_key': FATSECRET_KEY,
'oauth_nonce': nonce,
'oauth_signature_method': 'HMAC-SHA1',
'oauth_timestamp': timestamp,
'oauth_version': '1.0'
}
# Merge with the request parameters
all_params = {**params, **oauth_params}
# Sort the parameters by key
sorted_params = sorted(all_params.items())
# Build the parameter string for signing
param_string = "&".join([f"{urllib.parse.quote(str(k))}={urllib.parse.quote(str(v))}"
for k, v in sorted_params])
# Build the signature base string
signature_base = f"{http_method}&{urllib.parse.quote(url, safe='')}&{urllib.parse.quote(param_string, safe='')}"
# Build the signing key
signing_key = f"{urllib.parse.quote(str(FATSECRET_SECRET), safe='')}&"
# Create the HMAC-SHA1 signature
signature = base64.b64encode(
hmac.new(
signing_key.encode(),
signature_base.encode(),
hashlib.sha1
).digest()
).decode()
# Add the signature to the OAuth parameters
all_params['oauth_signature'] = signature
return all_params
def search_food(query, max_results=5, locale=None):
"""Поиск продуктов по названию с использованием OAuth 1.0"""
print(f"\n🔍 Searching for '{query}'{' with locale ' + locale if locale else ''}...")
# URL для API
url = "https://platform.fatsecret.com/rest/server.api"
# Параметры запроса
params = {
'method': 'foods.search',
'search_expression': query,
'max_results': max_results,
'format': 'json'
}
# Add the locale if one was given
if locale:
params['language'] = locale
# Get the signed OAuth parameters
oauth_params = generate_oauth_params("GET", url, params)
try:
# Send the request
response = requests.get(url, params=oauth_params)
print(f"📥 Response status code: {response.status_code}")
if response.status_code == 200:
print("✅ Search successful!")
result = response.json()
return result
else:
print(f"❌ Error during search: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Exception during search: {e}")
return None
def process_search_results(result):
"""Обработка и вывод результатов поиска"""
if not result or "foods" not in result:
print("❌ No valid results found")
return
foods_data = result["foods"]
if "food" not in foods_data:
print("⚠️ No food items found")
return
food_list = foods_data["food"]
if not isinstance(food_list, list):
food_list = [food_list] # Wrap a single result in a list
print(f"📊 Found {len(food_list)} results")
# Print the first 3 results
for i, food in enumerate(food_list[:3]):
food_name = food.get("food_name", "Unknown")
food_id = food.get("food_id", "Unknown")
food_desc = food.get("food_description", "No description")
print(f" {i+1}. [{food_id}] {food_name}")
print(f" {food_desc}")
def main():
"""Основная функция для тестирования API FatSecret"""
print("\n🚀 Starting FatSecret API test with OAuth 1.0...\n")
# Тестируем поиск продуктов на английском
search_queries = ["PowerAde", "Americano", "Coca-Cola", "chicken breast"]
for query in search_queries:
result = search_food(query)
if result:
process_search_results(result)
# Test food search in Russian
russian_queries = ["Барни", "хлеб", "яблоко"]
for query in russian_queries:
result = search_food(query, locale="ru_RU")
if result:
process_search_results(result)
print("\n✅ Test completed!")
if __name__ == "__main__":
main()
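For reference, the signature base string that generate_oauth_params() assembles follows RFC 5849: the HTTP method, the percent-encoded URL, and the percent-encoded sorted parameter string, joined by '&'. An illustrative, hand-filled example (all values invented):

http_method = "GET"
encoded_url = "https%3A%2F%2Fplatform.fatsecret.com%2Frest%2Fserver.api"
# The parameter string is percent-encoded a second time inside the base string,
# which is why '=' shows up as %3D and '&' as %26 below.
encoded_params = (
    "format%3Djson%26max_results%3D5%26method%3Dfoods.search"
    "%26oauth_consumer_key%3DKEY%26oauth_nonce%3D12345678"
    "%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%3D1700000000"
    "%26oauth_version%3D1.0%26search_expression%3Dapple"
)
signature_base = f"{http_method}&{encoded_url}&{encoded_params}"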

247
test_fatsecret_api_v2.py Executable file
View File

@@ -0,0 +1,247 @@
#!/usr/bin/env python3
"""
Script for testing the FatSecret API
Runs test requests against the FatSecret API using keys from the application configuration
"""
import os
import json
import time
import base64
import requests
import urllib.parse
from dotenv import load_dotenv
# Load the .env file
current_dir = os.path.dirname(os.path.abspath(__file__))
env_path = os.path.join(current_dir, ".env")
load_dotenv(env_path)
print(f"✅ Loaded .env from: {env_path}")
# Read the API keys from environment variables
FATSECRET_CLIENT_ID = os.environ.get("FATSECRET_CLIENT_ID")
FATSECRET_CLIENT_SECRET = os.environ.get("FATSECRET_CLIENT_SECRET")
if not FATSECRET_CLIENT_ID or not FATSECRET_CLIENT_SECRET:
raise ValueError("FatSecret API keys not found in .env file")
print(f"🔑 Using FatSecret API keys: CLIENT_ID={FATSECRET_CLIENT_ID[:8]}...")
customer_key = os.environ.get("FATSECRET_CUSTOMER_KEY")
if customer_key:
print(f"🔑 Using CUSTOMER_KEY={customer_key[:8]}...")
def get_oauth_token():
"""Получение OAuth 2.0 токена для доступа к API"""
print("🔄 Getting OAuth token...")
# Создаем заголовок авторизации с Base64-кодированными ID и секретом
auth_string = f"{FATSECRET_CLIENT_ID}:{FATSECRET_CLIENT_SECRET}"
auth_header = base64.b64encode(auth_string.encode()).decode()
# Verbose credential output for diagnostics
print(f"🔑 CLIENT_ID: {FATSECRET_CLIENT_ID}")
if FATSECRET_CLIENT_SECRET:
print(f"🔑 CLIENT_SECRET (first few chars): {FATSECRET_CLIENT_SECRET[:5]}...")
else:
print("⚠️ CLIENT_SECRET is missing!")
print(f"🔑 Authorization header: Basic {auth_header}")
# Request the token
token_url = "https://oauth.fatsecret.com/connect/token"
headers = {
"Authorization": f"Basic {auth_header}",
"Content-Type": "application/x-www-form-urlencoded"
}
data = {
"grant_type": "client_credentials",
"scope": "basic"
}
print("📤 Sending request with headers:")
for key, value in headers.items():
print(f" {key}: {value if key != 'Authorization' else value[:30]}...")
print("📤 Sending request with data:")
for key, value in data.items():
print(f" {key}: {value}")
try:
response = requests.post(token_url, headers=headers, data=data)
# Extra information about the request
print(f"📥 Response status code: {response.status_code}")
print(f"📥 Response headers: {dict(response.headers)}")
# Check that the request succeeded
if response.status_code == 200:
token_data = response.json()
access_token = token_data.get("access_token")
expires_in = token_data.get("expires_in")
print(f"✅ Got token, expires in {expires_in} seconds")
return access_token
else:
print(f"❌ Error getting token: {response.status_code}")
print(f"❌ Error response: {response.text}")
return None
except Exception as e:
print(f"❌ Exception getting token: {e}")
return None
def search_food(token, query, max_results=5):
"""Поиск продуктов по названию"""
if not token:
print("⚠️ No token available, cannot search")
return None
print(f"🔍 Searching for '{query}'...")
api_url = "https://platform.fatsecret.com/rest/server.api"
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json"
}
params = {
"method": "foods.search",
"search_expression": query,
"max_results": max_results,
"format": "json"
}
try:
response = requests.post(api_url, headers=headers, json=params)
if response.status_code == 200:
print(f"✅ Search successful")
result = response.json()
return result
else:
print(f"❌ Error searching: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Exception during search: {e}")
return None
def get_food_details(token, food_id):
"""Получение информации о продукте по ID"""
if not token:
print("⚠️ No token available, cannot get food details")
return None
print(f"🔍 Getting details for food ID: {food_id}")
api_url = "https://platform.fatsecret.com/rest/server.api"
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json"
}
params = {
"method": "food.get",
"food_id": food_id,
"format": "json"
}
try:
response = requests.post(api_url, headers=headers, json=params)
if response.status_code == 200:
print(f"✅ Got food details")
result = response.json()
return result
else:
print(f"❌ Error getting food details: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Exception getting food details: {e}")
return None
def main():
"""Основная функция для тестирования API FatSecret"""
print("\n🚀 Starting FatSecret API test...\n")
# Получаем токен доступа
token = get_oauth_token()
if not token:
print("❌ Failed to get OAuth token, exiting")
return
# Test food search
print("\n--- 📋 Testing Food Search ---")
search_queries = ["apple", "bread", "chicken breast", "молоко"]
first_food_id = None
for query in search_queries:
result = search_food(token, query)
if result and "foods" in result:
foods_data = result["foods"]
if "food" not in foods_data:
print(f"⚠️ No results found for '{query}'")
continue
food_list = foods_data["food"]
if not isinstance(food_list, list):
food_list = [food_list] # Wrap a single result in a list
print(f"📊 Found {len(food_list)} results")
# Print the first 3 results
for i, food in enumerate(food_list[:3]):
food_name = food.get("food_name", "Unknown")
food_id = food.get("food_id", "Unknown")
food_desc = food.get("food_description", "No description")
print(f" {i+1}. [{food_id}] {food_name}")
print(f" {food_desc}")
# Save the first food's ID for the next test
if not first_food_id and food_list:
first_food_id = food_list[0].get("food_id")
# Test fetching food details
if first_food_id:
print("\n--- 🍎 Testing Food Details ---")
food_details = get_food_details(token, first_food_id)
if food_details and "food" in food_details:
food = food_details["food"]
food_name = food.get("food_name", "Unknown")
brand = food.get("brand_name", "Generic")
print(f"📝 Details for: {food_name} [{brand}]")
# Print nutrition information
if "servings" in food:
servings = food["servings"]
if "serving" in servings:
serving_data = servings["serving"]
if not isinstance(serving_data, list):
serving_data = [serving_data]
print("\n📊 Nutrition info per serving:")
for i, serving in enumerate(serving_data[:2]): # Show up to 2 serving types
serving_desc = serving.get("serving_description", "Standard")
calories = serving.get("calories", "N/A")
protein = serving.get("protein", "N/A")
carbs = serving.get("carbohydrate", "N/A")
fat = serving.get("fat", "N/A")
print(f" Serving {i+1}: {serving_desc}")
print(f" Calories: {calories}")
print(f" Protein: {protein}g")
print(f" Carbohydrates: {carbs}g")
print(f" Fat: {fat}g")
print("\n✅ Test completed!")
if __name__ == "__main__":
main()

347
tests/test_nutrition_api.py Executable file
View File

@@ -0,0 +1,347 @@
#!/usr/bin/env python3
"""
Script for testing the Nutrition Service API
"""
import os
import sys
import json
import requests
from datetime import datetime
from dotenv import load_dotenv
# Load the .env file
current_dir = os.path.dirname(os.path.abspath(__file__))
env_path = os.path.join(current_dir, ".env")
load_dotenv(env_path)
print(f"✅ Loaded .env from: {env_path}")
# Base API URLs
BASE_URL = os.environ.get("NUTRITION_API_URL", "http://localhost:8006/api/v1/nutrition")
AUTH_URL = os.environ.get("AUTH_API_URL", "http://localhost:8001/api/v1/auth")
# Test data settings
TEST_USER = {
"username": "test_nutrition_user",
"password": "Test123!",
"email": "test_nutrition@example.com",
"first_name": "Test",
"last_name": "Nutrition",
"phone": "+79991234999"
}
def get_auth_token():
"""Получение токена авторизации"""
print("\n🔑 Получаем токен авторизации...")
# Пытаемся сначала войти
try:
login_data = {
"username": TEST_USER["username"],
"password": TEST_USER["password"]
}
login_response = requests.post(
f"{AUTH_URL}/login",
json=login_data
)
if login_response.status_code == 200:
token = login_response.json().get("access_token")
print("✅ Logged in successfully!")
return token
except Exception as e:
print(f"⚠️ Login attempt failed: {e}")
# If login failed, try registering
try:
register_response = requests.post(
f"{AUTH_URL}/register",
json=TEST_USER
)
if register_response.status_code == 201:
print("✅ User registered successfully!")
# Now log in with the new credentials
login_data = {
"username": TEST_USER["username"],
"password": TEST_USER["password"]
}
login_response = requests.post(
f"{AUTH_URL}/login",
json=login_data
)
if login_response.status_code == 200:
token = login_response.json().get("access_token")
print("✅ Logged in successfully!")
return token
except Exception as e:
print(f"❌ Registration error: {e}")
print("Failed to obtain an authorization token")
return None
def search_food(token, query="apple", max_results=5):
"""Поиск продуктов питания"""
print(f"\n🔍 Поиск продуктов по запросу '{query}'...")
headers = {"Authorization": f"Bearer {token}"}
data = {
"query": query,
"max_results": max_results
}
try:
response = requests.post(
f"{BASE_URL}/search",
json=data,
headers=headers
)
print(f"📥 Код ответа: {response.status_code}")
if response.status_code == 200:
results = response.json()
print(f"✅ Найдено продуктов: {len(results)}")
# Выводим первые 3 результата
for i, food in enumerate(results[:3]):
print(f" {i+1}. [{food.get('id')}] {food.get('name')}")
print(f" {food.get('description')}")
print(f" Калории: {food.get('calories')} ккал/100г")
return results
else:
print(f"❌ Ошибка при поиске: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Исключение при поиске: {e}")
return None
def add_diary_entry(token, food_id=1):
"""Добавление записи в дневник питания"""
print(f"\n📝 Добавление записи в дневник питания (продукт ID: {food_id})...")
headers = {"Authorization": f"Bearer {token}"}
today = datetime.now().strftime("%Y-%m-%d")
data = {
"food_item_id": food_id,
"entry_date": today,
"meal_type": "breakfast",
"quantity": 1.0,
"unit": "piece",
"notes": "Тестовая запись"
}
try:
response = requests.post(
f"{BASE_URL}/diary",
json=data,
headers=headers
)
print(f"📥 Код ответа: {response.status_code}")
if response.status_code in [200, 201]:
result = response.json()
print("✅ Запись успешно добавлена в дневник питания!")
print(f" ID записи: {result.get('id')}")
print(f" Калории: {result.get('calories')} ккал")
return result
else:
print(f"❌ Ошибка при добавлении записи: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Исключение при добавлении записи: {e}")
return None
def get_diary_entries(token):
"""Получение записей дневника за текущий день"""
print("\n📋 Получение записей дневника питания...")
headers = {"Authorization": f"Bearer {token}"}
today = datetime.now().strftime("%Y-%m-%d")
try:
response = requests.get(
f"{BASE_URL}/diary?date={today}",
headers=headers
)
print(f"📥 Код ответа: {response.status_code}")
if response.status_code == 200:
results = response.json()
print(f"✅ Получено записей: {len(results)}")
# Выводим записи
for i, entry in enumerate(results):
print(f" {i+1}. Прием пищи: {entry.get('meal_type')}")
print(f" Продукт ID: {entry.get('food_item_id')}")
print(f" Калории: {entry.get('calories')} ккал")
return results
else:
print(f"❌ Ошибка при получении записей: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Исключение при получении записей: {e}")
return None
def add_water_entry(token, amount_ml=250):
"""Добавление записи о потреблении воды"""
print(f"\n💧 Добавление записи о потреблении воды ({amount_ml} мл)...")
headers = {"Authorization": f"Bearer {token}"}
today = datetime.now().strftime("%Y-%m-%d")
data = {
"amount_ml": amount_ml,
"entry_date": today,
"notes": "Тестовая запись"
}
try:
response = requests.post(
f"{BASE_URL}/water",
json=data,
headers=headers
)
print(f"📥 Код ответа: {response.status_code}")
if response.status_code in [200, 201]:
result = response.json()
print("✅ Запись о потреблении воды успешно добавлена!")
print(f" ID записи: {result.get('id')}")
print(f" Объем: {result.get('amount_ml')} мл")
return result
else:
print(f"❌ Ошибка при добавлении записи о воде: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Исключение при добавлении записи о воде: {e}")
return None
def add_activity_entry(token):
"""Добавление записи о физической активности"""
print("\n🏃‍♀️ Добавление записи о физической активности...")
headers = {"Authorization": f"Bearer {token}"}
today = datetime.now().strftime("%Y-%m-%d")
data = {
"entry_date": today,
"activity_type": "walking",
"duration_minutes": 30,
"distance_km": 2.5,
"intensity": "medium",
"notes": "Тестовая активность"
}
try:
response = requests.post(
f"{BASE_URL}/activity",
json=data,
headers=headers
)
print(f"📥 Код ответа: {response.status_code}")
if response.status_code in [200, 201]:
result = response.json()
print("✅ Запись о физической активности успешно добавлена!")
print(f" ID записи: {result.get('id')}")
print(f" Тип: {result.get('activity_type')}")
print(f" Продолжительность: {result.get('duration_minutes')} мин")
print(f" Потрачено калорий: {result.get('calories_burned')} ккал")
return result
else:
print(f"❌ Ошибка при добавлении записи об активности: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Исключение при добавлении записи об активности: {e}")
return None
def get_daily_summary(token):
"""Получение дневной сводки"""
print("\n📊 Получение сводки за день...")
headers = {"Authorization": f"Bearer {token}"}
today = datetime.now().strftime("%Y-%m-%d")
try:
response = requests.get(
f"{BASE_URL}/summary?date={today}",
headers=headers
)
print(f"📥 Код ответа: {response.status_code}")
if response.status_code == 200:
result = response.json()
print("✅ Сводка за день успешно получена!")
print(f" Всего калорий: {result.get('total_calories')} ккал")
print(f" Всего белка: {result.get('total_protein')} г")
print(f" Всего жиров: {result.get('total_fat')} г")
print(f" Всего углеводов: {result.get('total_carbs')} г")
print(f" Потреблено воды: {result.get('water_consumed_ml')} мл")
print(f" Активность: {result.get('activity_minutes')} мин")
print(f" Сожжено калорий: {result.get('calories_burned')} ккал")
return result
else:
print(f"❌ Ошибка при получении сводки: {response.status_code}")
print(response.text)
return None
except Exception as e:
print(f"❌ Исключение при получении сводки: {e}")
return None
def main():
"""Основная функция для тестирования API сервиса питания"""
print("\n🚀 Запуск тестирования API сервиса питания...\n")
# Получаем токен авторизации
token = get_auth_token()
if not token:
print("❌ Невозможно продолжить тестирование без авторизации")
sys.exit(1)
# Выполняем поиск продуктов
search_results = search_food(token, "apple")
if search_results and len(search_results) > 0:
# Используем первый найденный продукт для дальнейшего тестирования
food_id = search_results[0].get("id")
# Добавляем запись в дневник питания
add_diary_entry(token, food_id)
# Получаем записи дневника
get_diary_entries(token)
else:
# Если поиск не дал результатов, продолжаем тестирование с предполагаемым ID продукта
print("⚠️ Используем предполагаемый ID продукта для дальнейших тестов")
add_diary_entry(token, 1)
# Добавляем записи о воде и активности
add_water_entry(token)
add_activity_entry(token)
# Получаем дневную сводку
get_daily_summary(token)
print("\n✅ Тестирование API сервиса питания завершено!")
if __name__ == "__main__":
main()
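# Usage sketch (an assumption, mirroring tests/test_nutrition_service.sh below:
# the nutrition service is expected on localhost:8006 and the auth service on
# localhost:8001, with BASE_URL configured accordingly at the top of this file):
#     python tests/test_nutrition_service.py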

tests/test_nutrition_service.sh Executable file
View File

@@ -0,0 +1,189 @@
#!/bin/bash
# Script for testing the nutrition service API via cURL
# Settings
API_BASE_URL="http://localhost:8006/api/v1/nutrition"
AUTH_URL="http://localhost:8001/api/v1/auth"
TODAY=$(date +"%Y-%m-%d")
TEST_USERNAME="test_nutrition_user"
TEST_PASSWORD="Test123!"
# Colored output
GREEN='\033[0;32m'
RED='\033[0;31m'
BLUE='\033[0;34m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color
echo -e "${BLUE}🚀 Running Nutrition Service API tests${NC}"
echo "---------------------------------------------"
# Step 1: Authenticate and obtain a token
echo -e "${BLUE}📝 Step 1: Obtaining an auth token${NC}"
# Attempt to log in
login_response=$(curl -s -X POST "${AUTH_URL}/login" \
    -H "Content-Type: application/json" \
    -d '{
        "username": "'"${TEST_USERNAME}"'",
        "password": "'"${TEST_PASSWORD}"'"
    }')
# Check whether the login succeeded
if [[ $login_response == *"access_token"* ]]; then
    TOKEN=$(echo $login_response | grep -o '"access_token":"[^"]*' | sed 's/"access_token":"//')
    echo -e "${GREEN}✅ Login succeeded!${NC}"
else
    echo -e "${YELLOW}⚠️ Login failed, trying registration...${NC}"
    # Try to register the user
    curl -s -X POST "${AUTH_URL}/register" \
        -H "Content-Type: application/json" \
        -d '{
            "email": "'"${TEST_USERNAME}@example.com"'",
            "username": "'"${TEST_USERNAME}"'",
            "password": "'"${TEST_PASSWORD}"'",
            "first_name": "Test",
            "last_name": "Nutrition",
            "phone": "+79991234999"
        }' > /dev/null
    # After registering, try to log in again
    login_response=$(curl -s -X POST "${AUTH_URL}/login" \
        -H "Content-Type: application/json" \
        -d '{
            "username": "'"${TEST_USERNAME}"'",
            "password": "'"${TEST_PASSWORD}"'"
        }')
    if [[ $login_response == *"access_token"* ]]; then
        TOKEN=$(echo $login_response | grep -o '"access_token":"[^"]*' | sed 's/"access_token":"//')
        echo -e "${GREEN}✅ Registration and login succeeded!${NC}"
    else
        echo -e "${RED}Failed to obtain an auth token${NC}"
        echo "Server response: $login_response"
        exit 1
    fi
fi
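# Note: the grep/sed pipeline above is a deliberately dependency-free JSON
# parse; if jq is available, a more robust equivalent would be:
#     TOKEN=$(echo "$login_response" | jq -r '.access_token')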
# Step 2: Search for food items
echo -e "\n${BLUE}📝 Step 2: Searching for food items${NC}"
search_response=$(curl -s -X POST "${API_BASE_URL}/search" \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer ${TOKEN}" \
    -d '{
        "query": "apple",
        "max_results": 5
    }')
echo "Search results:"
echo "$search_response" | grep -o '"name":"[^"]*' | head -3 | sed 's/"name":"/- /'
echo "..."
# Take the ID of the first food item from the search results
FOOD_ID=$(echo $search_response | grep -o '"id":[0-9]*' | head -1 | sed 's/"id"://')
if [[ -z "$FOOD_ID" ]]; then
    echo -e "${YELLOW}⚠️ Could not extract a food item ID, using the default${NC}"
    FOOD_ID=1
else
    echo -e "${GREEN}✅ Got food item ID: ${FOOD_ID}${NC}"
fi
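# With jq the same extraction would be (assuming the endpoint returns a JSON
# array of food items, as the Python test script's search_results[0] implies):
#     FOOD_ID=$(echo "$search_response" | jq -r '.[0].id')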
# Step 3: Add a food diary entry
echo -e "\n${BLUE}📝 Step 3: Adding a food diary entry${NC}"
diary_response=$(curl -s -X POST "${API_BASE_URL}/diary" \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer ${TOKEN}" \
    -d '{
        "food_item_id": '"${FOOD_ID}"',
        "entry_date": "'"${TODAY}"'",
        "meal_type": "breakfast",
        "quantity": 1.5,
        "unit": "piece",
        "notes": "Test entry"
    }')
if [[ $diary_response == *"id"* ]]; then
    echo -e "${GREEN}✅ Entry added to the food diary${NC}"
    echo "Entry details:"
    echo "$diary_response" | grep -o '"calories":[0-9.]*' | sed 's/"calories":/Calories: /'
else
    echo -e "${RED}❌ Error adding the diary entry${NC}"
    echo "Server response: $diary_response"
fi
# Step 4: Fetch the diary entries
echo -e "\n${BLUE}📝 Step 4: Fetching the diary entries${NC}"
get_diary_response=$(curl -s -X GET "${API_BASE_URL}/diary?date=${TODAY}" \
    -H "Authorization: Bearer ${TOKEN}")
if [[ $get_diary_response == *"meal_type"* ]]; then
    echo -e "${GREEN}✅ Diary entries fetched successfully${NC}"
    echo "Number of entries: $(echo $get_diary_response | grep -o '"meal_type"' | wc -l)"
else
    echo -e "${YELLOW}⚠️ No diary entries, or the request failed${NC}"
    echo "Server response: $get_diary_response"
fi
# Step 5: Add a water intake entry
echo -e "\n${BLUE}📝 Step 5: Adding a water intake entry${NC}"
water_response=$(curl -s -X POST "${API_BASE_URL}/water" \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer ${TOKEN}" \
    -d '{
        "amount_ml": 250,
        "entry_date": "'"${TODAY}"'",
        "notes": "Morning glass of water"
    }')
if [[ $water_response == *"id"* ]]; then
    echo -e "${GREEN}✅ Water intake entry added${NC}"
    echo "Entry details:"
    echo "$water_response" | grep -o '"amount_ml":[0-9]*' | sed 's/"amount_ml":/Volume (ml): /'
else
    echo -e "${RED}❌ Error adding the water entry${NC}"
    echo "Server response: $water_response"
fi
# Step 6: Add a physical activity entry
echo -e "\n${BLUE}📝 Step 6: Adding a physical activity entry${NC}"
activity_response=$(curl -s -X POST "${API_BASE_URL}/activity" \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer ${TOKEN}" \
    -d '{
        "entry_date": "'"${TODAY}"'",
        "activity_type": "running",
        "duration_minutes": 30,
        "distance_km": 5.2,
        "intensity": "medium",
        "notes": "Morning run"
    }')
if [[ $activity_response == *"id"* ]]; then
    echo -e "${GREEN}✅ Physical activity entry added${NC}"
    echo "Entry details:"
    echo "$activity_response" | grep -o '"duration_minutes":[0-9]*' | sed 's/"duration_minutes":/Duration (min): /'
    echo "$activity_response" | grep -o '"calories_burned":[0-9.]*' | sed 's/"calories_burned":/Calories burned: /'
else
    echo -e "${RED}❌ Error adding the activity entry${NC}"
    echo "Server response: $activity_response"
fi
# Step 7: Fetch the daily summary
echo -e "\n${BLUE}📝 Step 7: Fetching the daily summary${NC}"
summary_response=$(curl -s -X GET "${API_BASE_URL}/summary?date=${TODAY}" \
    -H "Authorization: Bearer ${TOKEN}")
if [[ $summary_response == *"total_calories"* ]]; then
    echo -e "${GREEN}✅ Daily summary fetched successfully${NC}"
    echo "Summary details:"
    echo "$summary_response" | grep -o '"total_calories":[0-9.]*' | sed 's/"total_calories":/Total calories: /'
    echo "$summary_response" | grep -o '"water_consumed_ml":[0-9]*' | sed 's/"water_consumed_ml":/Water consumed (ml): /'
    echo "$summary_response" | grep -o '"activity_minutes":[0-9]*' | sed 's/"activity_minutes":/Activity minutes: /'
else
    echo -e "${YELLOW}⚠️ No summary data, or the request failed${NC}"
    echo "Server response: $summary_response"
fi
echo -e "\n${GREEN}✅ Тестирование API сервиса питания завершено!${NC}"

View File

@@ -1,4 +1,4 @@
#!/home/trevor/dev/chat/venv/bin/python
#!/home/trevor/dev/chat/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys

View File

@@ -1,4 +1,4 @@
#!/home/trevor/dev/chat/venv/bin/python
#!/home/trevor/dev/chat/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys

View File

@@ -1,4 +1,4 @@
#!/home/trevor/dev/chat/venv/bin/python
#!/home/trevor/dev/chat/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys

View File

@@ -1,8 +0,0 @@
#!/home/trevor/dev/chat/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from fastapi.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

View File

@@ -1,4 +1,4 @@
#!/home/trevor/dev/chat/venv/bin/python
#!/home/trevor/dev/chat/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys

View File

@@ -1,33 +0,0 @@
PyJWT-2.10.1.dist-info/AUTHORS.rst,sha256=klzkNGECnu2_VY7At89_xLBF3vUSDruXk3xwgUBxzwc,322
PyJWT-2.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyJWT-2.10.1.dist-info/LICENSE,sha256=eXp6ICMdTEM-nxkR2xcx0GtYKLmPSZgZoDT3wPVvXOU,1085
PyJWT-2.10.1.dist-info/METADATA,sha256=EkewF6D6KU8SGaaQzVYfxUUU1P_gs_dp1pYTkoYvAx8,3990
PyJWT-2.10.1.dist-info/RECORD,,
PyJWT-2.10.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
PyJWT-2.10.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
PyJWT-2.10.1.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4
jwt/__init__.py,sha256=VB2vFKuboTjcDGeZ8r-UqK_dz3NsQSQEqySSICby8Xg,1711
jwt/__pycache__/__init__.cpython-312.pyc,,
jwt/__pycache__/algorithms.cpython-312.pyc,,
jwt/__pycache__/api_jwk.cpython-312.pyc,,
jwt/__pycache__/api_jws.cpython-312.pyc,,
jwt/__pycache__/api_jwt.cpython-312.pyc,,
jwt/__pycache__/exceptions.cpython-312.pyc,,
jwt/__pycache__/help.cpython-312.pyc,,
jwt/__pycache__/jwk_set_cache.cpython-312.pyc,,
jwt/__pycache__/jwks_client.cpython-312.pyc,,
jwt/__pycache__/types.cpython-312.pyc,,
jwt/__pycache__/utils.cpython-312.pyc,,
jwt/__pycache__/warnings.cpython-312.pyc,,
jwt/algorithms.py,sha256=cKr-XEioe0mBtqJMCaHEswqVOA1Z8Purt5Sb3Bi-5BE,30409
jwt/api_jwk.py,sha256=6F1r7rmm8V5qEnBKA_xMjS9R7VoANe1_BL1oD2FrAjE,4451
jwt/api_jws.py,sha256=aM8vzqQf6mRrAw7bRy-Moj_pjWsKSVQyYK896AfMjJU,11762
jwt/api_jwt.py,sha256=OGT4hok1l5A6FH_KdcrU5g6u6EQ8B7em0r9kGM9SYgA,14512
jwt/exceptions.py,sha256=bUIOJ-v9tjopTLS-FYOTc3kFx5WP5IZt7ksN_HE1G9Q,1211
jwt/help.py,sha256=vFdNzjQoAch04XCMYpCkyB2blaqHAGAqQrtf9nSPkdk,1808
jwt/jwk_set_cache.py,sha256=hBKmN-giU7-G37L_XKgc_OZu2ah4wdbj1ZNG_GkoSE8,959
jwt/jwks_client.py,sha256=p9b-IbQqo2tEge9Zit3oSPBFNePqwho96VLbnUrHUWs,4259
jwt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
jwt/types.py,sha256=VnhGv_VFu5a7_mrPoSCB7HaNLrJdhM8Sq1sSfEg0gLU,99
jwt/utils.py,sha256=hxOjvDBheBYhz-RIPiEz7Q88dSUSTMzEdKE_Ww2VdJw,3640
jwt/warnings.py,sha256=50XWOnyNsIaqzUJTk6XHNiIDykiL763GYA92MjTKmok,59

View File

@@ -1,45 +1,47 @@
Metadata-Version: 2.1
Name: PyJWT
Version: 2.10.1
Version: 2.8.0
Summary: JSON Web Token implementation in Python
Author-email: Jose Padilla <hello@jpadilla.com>
Home-page: https://github.com/jpadilla/pyjwt
Author: Jose Padilla
Author-email: hello@jpadilla.com
License: MIT
Project-URL: Homepage, https://github.com/jpadilla/pyjwt
Keywords: json,jwt,security,signing,token,web
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Topic :: Utilities
Requires-Python: >=3.9
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE
License-File: AUTHORS.rst
Requires-Dist: typing-extensions ; python_version <= "3.7"
Provides-Extra: crypto
Requires-Dist: cryptography>=3.4.0; extra == "crypto"
Requires-Dist: cryptography (>=3.4.0) ; extra == 'crypto'
Provides-Extra: dev
Requires-Dist: coverage[toml]==5.0.4; extra == "dev"
Requires-Dist: cryptography>=3.4.0; extra == "dev"
Requires-Dist: pre-commit; extra == "dev"
Requires-Dist: pytest<7.0.0,>=6.0.0; extra == "dev"
Requires-Dist: sphinx; extra == "dev"
Requires-Dist: sphinx-rtd-theme; extra == "dev"
Requires-Dist: zope.interface; extra == "dev"
Requires-Dist: sphinx (<5.0.0,>=4.5.0) ; extra == 'dev'
Requires-Dist: sphinx-rtd-theme ; extra == 'dev'
Requires-Dist: zope.interface ; extra == 'dev'
Requires-Dist: cryptography (>=3.4.0) ; extra == 'dev'
Requires-Dist: pytest (<7.0.0,>=6.0.0) ; extra == 'dev'
Requires-Dist: coverage[toml] (==5.0.4) ; extra == 'dev'
Requires-Dist: pre-commit ; extra == 'dev'
Provides-Extra: docs
Requires-Dist: sphinx; extra == "docs"
Requires-Dist: sphinx-rtd-theme; extra == "docs"
Requires-Dist: zope.interface; extra == "docs"
Requires-Dist: sphinx (<5.0.0,>=4.5.0) ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
Requires-Dist: zope.interface ; extra == 'docs'
Provides-Extra: tests
Requires-Dist: coverage[toml]==5.0.4; extra == "tests"
Requires-Dist: pytest<7.0.0,>=6.0.0; extra == "tests"
Requires-Dist: pytest (<7.0.0,>=6.0.0) ; extra == 'tests'
Requires-Dist: coverage[toml] (==5.0.4) ; extra == 'tests'
PyJWT
=====
@@ -61,12 +63,11 @@ A Python implementation of `RFC 7519 <https://tools.ietf.org/html/rfc7519>`_. Or
Sponsor
-------
.. |auth0-logo| image:: https://github.com/user-attachments/assets/ee98379e-ee76-4bcb-943a-e25c4ea6d174
:width: 160px
+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/developers <https://auth0.com/developers?utm_source=GHsponsor&utm_medium=GHsponsor&utm_campaign=pyjwt&utm_content=auth>`_. |
+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/signup <https://auth0.com/signup?utm_source=external_sites&utm_medium=pyjwt&utm_campaign=devn_signup>`_. |
+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
.. |auth0-logo| image:: https://user-images.githubusercontent.com/83319/31722733-de95bbde-b3ea-11e7-96bf-4f4e8f915588.png
Installing
----------

View File

@@ -0,0 +1,33 @@
PyJWT-2.8.0.dist-info/AUTHORS.rst,sha256=klzkNGECnu2_VY7At89_xLBF3vUSDruXk3xwgUBxzwc,322
PyJWT-2.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyJWT-2.8.0.dist-info/LICENSE,sha256=eXp6ICMdTEM-nxkR2xcx0GtYKLmPSZgZoDT3wPVvXOU,1085
PyJWT-2.8.0.dist-info/METADATA,sha256=pV2XZjvithGcVesLHWAv0J4T5t8Qc66fip2sbxwoz1o,4160
PyJWT-2.8.0.dist-info/RECORD,,
PyJWT-2.8.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
PyJWT-2.8.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
PyJWT-2.8.0.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4
jwt/__init__.py,sha256=mV9lg6n4-0xiqCKaE1eEPC9a4j6sEkEYQcKghULE7kU,1670
jwt/__pycache__/__init__.cpython-312.pyc,,
jwt/__pycache__/algorithms.cpython-312.pyc,,
jwt/__pycache__/api_jwk.cpython-312.pyc,,
jwt/__pycache__/api_jws.cpython-312.pyc,,
jwt/__pycache__/api_jwt.cpython-312.pyc,,
jwt/__pycache__/exceptions.cpython-312.pyc,,
jwt/__pycache__/help.cpython-312.pyc,,
jwt/__pycache__/jwk_set_cache.cpython-312.pyc,,
jwt/__pycache__/jwks_client.cpython-312.pyc,,
jwt/__pycache__/types.cpython-312.pyc,,
jwt/__pycache__/utils.cpython-312.pyc,,
jwt/__pycache__/warnings.cpython-312.pyc,,
jwt/algorithms.py,sha256=RDsv5Lm3bzwsiWT3TynT7JR41R6H6s_fWUGOIqd9x_I,29800
jwt/api_jwk.py,sha256=HPxVqgBZm7RTaEXydciNBCuYNKDYOC_prTdaN9toGbo,4196
jwt/api_jws.py,sha256=da17RrDe0PDccTbx3rx2lLezEG_c_YGw_vVHa335IOk,11099
jwt/api_jwt.py,sha256=yF9DwF1kt3PA5n_TiU0OmHd0LtPHfe4JCE1XOfKPjw0,12638
jwt/exceptions.py,sha256=KDC3M7cTrpR4OQXVURlVMThem0pfANSgBxRz-ttivmo,1046
jwt/help.py,sha256=Jrp84fG43sCwmSIaDtY08I6ZR2VE7NhrTff89tYSE40,1749
jwt/jwk_set_cache.py,sha256=hBKmN-giU7-G37L_XKgc_OZu2ah4wdbj1ZNG_GkoSE8,959
jwt/jwks_client.py,sha256=9W8JVyGByQgoLbBN1u5iY1_jlgfnnukeOBTpqaM_9SE,4222
jwt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
jwt/types.py,sha256=VnhGv_VFu5a7_mrPoSCB7HaNLrJdhM8Sq1sSfEg0gLU,99
jwt/utils.py,sha256=PAI05_8MHQCxWQTDlwN0hTtTIT2DTTZ28mm1x6-26UY,3903
jwt/warnings.py,sha256=50XWOnyNsIaqzUJTk6XHNiIDykiL763GYA92MjTKmok,59

View File

@@ -1,5 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (80.9.0)
Generator: bdist_wheel (0.40.0)
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -1,4 +1,4 @@
Copyright 2005-2025 SQLAlchemy authors and contributors <see AUTHORS file>.
Copyright 2005-2023 SQLAlchemy authors and contributors <see AUTHORS file>.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in

View File

@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 2.0.43
Version: 2.0.23
Summary: Database Abstraction Library
Home-page: https://www.sqlalchemy.org
Author: Mike Bayer
@@ -10,6 +10,7 @@ Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
@@ -18,70 +19,67 @@ Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: importlib-metadata; python_version < "3.8"
Requires-Dist: greenlet>=1; python_version < "3.14" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32"))))))
Requires-Dist: typing-extensions>=4.6.0
Provides-Extra: asyncio
Requires-Dist: greenlet>=1; extra == "asyncio"
Provides-Extra: mypy
Requires-Dist: mypy>=0.910; extra == "mypy"
Provides-Extra: mssql
Requires-Dist: pyodbc; extra == "mssql"
Provides-Extra: mssql-pymssql
Requires-Dist: pymssql; extra == "mssql-pymssql"
Provides-Extra: mssql-pyodbc
Requires-Dist: pyodbc; extra == "mssql-pyodbc"
Provides-Extra: mysql
Requires-Dist: mysqlclient>=1.4.0; extra == "mysql"
Provides-Extra: mysql-connector
Requires-Dist: mysql-connector-python; extra == "mysql-connector"
Provides-Extra: mariadb-connector
Requires-Dist: mariadb!=1.1.10,!=1.1.2,!=1.1.5,>=1.0.1; extra == "mariadb-connector"
Provides-Extra: oracle
Requires-Dist: cx_oracle>=8; extra == "oracle"
Provides-Extra: oracle-oracledb
Requires-Dist: oracledb>=1.0.1; extra == "oracle-oracledb"
Provides-Extra: postgresql
Requires-Dist: psycopg2>=2.7; extra == "postgresql"
Provides-Extra: postgresql-pg8000
Requires-Dist: pg8000>=1.29.1; extra == "postgresql-pg8000"
Provides-Extra: postgresql-asyncpg
Requires-Dist: greenlet>=1; extra == "postgresql-asyncpg"
Requires-Dist: asyncpg; extra == "postgresql-asyncpg"
Provides-Extra: postgresql-psycopg2binary
Requires-Dist: psycopg2-binary; extra == "postgresql-psycopg2binary"
Provides-Extra: postgresql-psycopg2cffi
Requires-Dist: psycopg2cffi; extra == "postgresql-psycopg2cffi"
Provides-Extra: postgresql-psycopg
Requires-Dist: psycopg>=3.0.7; extra == "postgresql-psycopg"
Provides-Extra: postgresql-psycopgbinary
Requires-Dist: psycopg[binary]>=3.0.7; extra == "postgresql-psycopgbinary"
Provides-Extra: pymysql
Requires-Dist: pymysql; extra == "pymysql"
Requires-Dist: typing-extensions >=4.2.0
Requires-Dist: greenlet !=0.4.17 ; platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))
Requires-Dist: importlib-metadata ; python_version < "3.8"
Provides-Extra: aiomysql
Requires-Dist: greenlet>=1; extra == "aiomysql"
Requires-Dist: aiomysql>=0.2.0; extra == "aiomysql"
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql'
Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql'
Provides-Extra: aioodbc
Requires-Dist: greenlet>=1; extra == "aioodbc"
Requires-Dist: aioodbc; extra == "aioodbc"
Provides-Extra: asyncmy
Requires-Dist: greenlet>=1; extra == "asyncmy"
Requires-Dist: asyncmy!=0.2.4,!=0.2.6,>=0.2.3; extra == "asyncmy"
Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc'
Requires-Dist: aioodbc ; extra == 'aioodbc'
Provides-Extra: aiosqlite
Requires-Dist: greenlet>=1; extra == "aiosqlite"
Requires-Dist: aiosqlite; extra == "aiosqlite"
Requires-Dist: typing_extensions!=3.10.0.1; extra == "aiosqlite"
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite'
Requires-Dist: aiosqlite ; extra == 'aiosqlite'
Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite'
Provides-Extra: asyncio
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio'
Provides-Extra: asyncmy
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy'
Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy'
Provides-Extra: mariadb_connector
Requires-Dist: mariadb !=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector'
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mypy
Requires-Dist: mypy >=0.910 ; extra == 'mypy'
Provides-Extra: mysql
Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql'
Provides-Extra: mysql_connector
Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
Provides-Extra: oracle
Requires-Dist: cx-oracle >=8 ; extra == 'oracle'
Provides-Extra: oracle_oracledb
Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb'
Provides-Extra: postgresql
Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql'
Provides-Extra: postgresql_asyncpg
Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg'
Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg
Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: postgresql_psycopgbinary
Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary'
Provides-Extra: pymysql
Requires-Dist: pymysql ; extra == 'pymysql'
Provides-Extra: sqlcipher
Requires-Dist: sqlcipher3_binary; extra == "sqlcipher"
Dynamic: license-file
Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher'
SQLAlchemy
==========

View File

@@ -0,0 +1,530 @@
SQLAlchemy-2.0.23.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
SQLAlchemy-2.0.23.dist-info/LICENSE,sha256=2lSTeluT1aC-5eJXO8vhkzf93qCSeV_mFXLrv3tNdIU,1100
SQLAlchemy-2.0.23.dist-info/METADATA,sha256=znDChLueFNPCOPuNix-FfY7FG6aQOCM-lQwwN-cPLQs,9551
SQLAlchemy-2.0.23.dist-info/RECORD,,
SQLAlchemy-2.0.23.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
SQLAlchemy-2.0.23.dist-info/WHEEL,sha256=JmQLNqDEfvnYMfsIaVeSP3fmUcYDwmF12m3QYW0c7QQ,152
SQLAlchemy-2.0.23.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
sqlalchemy/__init__.py,sha256=DjKCAltzrHGfaVdXVeFJpBmTaX6JmyloHANzewBUWo4,12708
sqlalchemy/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/__pycache__/events.cpython-312.pyc,,
sqlalchemy/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/__pycache__/inspection.cpython-312.pyc,,
sqlalchemy/__pycache__/log.cpython-312.pyc,,
sqlalchemy/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/__pycache__/types.cpython-312.pyc,,
sqlalchemy/connectors/__init__.py,sha256=uKUYWQoXyleIyjWBuh7gzgnazJokx3DaasKJbFOfQGA,476
sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,,
sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/connectors/aioodbc.py,sha256=QiafuN9bx_wcIs8tByLftTmGAegXPoFPwUaxCDU_ZQA,5737
sqlalchemy/connectors/asyncio.py,sha256=ZZmJSFT50u-GEjZzytQOdB_tkBFxi3XPWRrNhs_nASc,6139
sqlalchemy/connectors/pyodbc.py,sha256=NskMydn26ZkHL8aQ1V3L4WIAWin3zwJ5VEnlHvAD1DE,8453
sqlalchemy/cyextension/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so,sha256=qPSMnyXVSLYHMr_ot_ZK7yEYadhTuT8ryb6eTMFFWrM,1947440
sqlalchemy/cyextension/collections.pyx,sha256=KDI5QTOyYz9gDl-3d7MbGMA0Kc-wxpJqnLmCaUmQy2U,12323
sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so,sha256=J9m0gK6R8PGR36jxAKx415VxX0-0fqvbQAP9-DDU1qA,811232
sqlalchemy/cyextension/immutabledict.pxd,sha256=oc8BbnQwDg7pWAdThB-fzu8s9_ViOe1Ds-8T0r0POjI,41
sqlalchemy/cyextension/immutabledict.pyx,sha256=aQJPZKjcqbO8jHDqpC9F-v-ew2qAjUscc5CntaheZUk,3285
sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so,sha256=WOLcEWRNXn4UtJGhzF5B1h7JpPPfn-ziQMT0lkhobQE,533968
sqlalchemy/cyextension/processors.pyx,sha256=0swFIBdR19x1kPRe-dijBaLW898AhH6QJizbv4ho9pk,1545
sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so,sha256=bte73oURZXuV7YvkjyGo-OjRCnSgYukqDp5KM9-Z8xY,626112
sqlalchemy/cyextension/resultproxy.pyx,sha256=cDtMjLTdC47g7cME369NSOCck3JwG2jwZ6j25no3_gw,2477
sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so,sha256=8yMbb069NQN1b6yAsCBCMpbX94sH4iLs61vPNxd0bOg,958760
sqlalchemy/cyextension/util.pyx,sha256=lv03p63oVn23jLhMI4_RYGewUnJfh-4FkrNMEFL7A3Y,2289
sqlalchemy/dialects/__init__.py,sha256=hLsgIEomunlp4mNLnvjCQTLOnBVva8N7IT2-RYrN2_4,1770
sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/dialects/_typing.py,sha256=P2ML2o4b_bWAAy3zbdoUjx3vXsMNwpiOblef8ThCxlM,648
sqlalchemy/dialects/mssql/__init__.py,sha256=CYbbydyMSLjUq8vY1siNStd4lvjVXod8ddeDS6ELHLk,1871
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/dialects/mssql/aioodbc.py,sha256=ncj3yyfvW91o3g19GB5s1I0oaZKUO_P-R2nwnLF0t9E,2013
sqlalchemy/dialects/mssql/base.py,sha256=l9vX6fK6DJEYA00N9uDnvSbqfgvxXfYUn2C4AF5T920,133649
sqlalchemy/dialects/mssql/information_schema.py,sha256=ll0zAupJ4cPvhi9v5hTi7PQLU1lae4o6eQ5Vg7gykXQ,8074
sqlalchemy/dialects/mssql/json.py,sha256=B0m6H08CKuk-yomDHcCwfQbVuVN2WLufuVueA_qb1NQ,4573
sqlalchemy/dialects/mssql/provision.py,sha256=x7XRSQDxz4jz2uIpqwhuIXpL9bic0Vw7Mhy39HOkyqY,5013
sqlalchemy/dialects/mssql/pymssql.py,sha256=BfJp9t-IQabqWXySJBmP9pwNTWnJqbjA2jJM9M4XeWc,4029
sqlalchemy/dialects/mssql/pyodbc.py,sha256=qwZ8ByOTZ1WObjxeOravoJBSBX-s4RJ_PZ5VJ_Ch5Ws,27048
sqlalchemy/dialects/mysql/__init__.py,sha256=btLABiNnmbWt9ziW-XgVWEB1qHWQcSFz7zxZNw4m_LY,2144
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,,
sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/mysql/aiomysql.py,sha256=Zb-_F9Pzl0t-fT1bZwbNNne6jjCUqBXxeizbhMFPqls,9750
sqlalchemy/dialects/mysql/asyncmy.py,sha256=zqupDz7AJihjv3E8w_4XAtq95d8stdrETNx60MLNVr0,9819
sqlalchemy/dialects/mysql/base.py,sha256=q-DzkR_txwDTeWTEByzHAoIArYU3Bb5HT2Bnmuw7WIM,120688
sqlalchemy/dialects/mysql/cymysql.py,sha256=5CQVJAlqQ3pT4IDGSQJH2hCzj-EWjUitA21MLqJwEEs,2291
sqlalchemy/dialects/mysql/dml.py,sha256=qw0ZweHbMsbNyVSfC17HqylCnf7XAuIjtgofiWABT8k,7636
sqlalchemy/dialects/mysql/enumerated.py,sha256=1L2J2wT6nQEmRS4z-jzZpoi44IqIaHgBRZZB9m55czo,8439
sqlalchemy/dialects/mysql/expression.py,sha256=WW5G2XPwqJfXjuzHBt4BRP0pCLcPJkPD1mvZX1g0JL0,4066
sqlalchemy/dialects/mysql/json.py,sha256=JlSFBAHhJ9JmV-3azH80xkLgeh7g6A6DVyNVCNZiKPU,2260
sqlalchemy/dialects/mysql/mariadb.py,sha256=Sugyngvo6j6SfFFuJ23rYeFWEPdZ9Ji9guElsk_1WSQ,844
sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=F1VPosecC1hDZqjzZI29j4GUduyU4ewPwb-ekBQva5w,8725
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=5glmkPhD_KP-Mci8ZXBr4yzqH1MDfzCJ9F_kZNyXcGo,5666
sqlalchemy/dialects/mysql/mysqldb.py,sha256=R5BDiXiHX5oFuAOzyxZ6TYUTGzly-dulMeQLkeia6kk,9649
sqlalchemy/dialects/mysql/provision.py,sha256=uPT6-BIoP_12XLmWAza1TDFNhOVVJ3rmQoMH7nvh-Vg,3226
sqlalchemy/dialects/mysql/pymysql.py,sha256=d2-00IPoyEDkR9REQTE-DGEQrGshUq_0G5liZ5FiSEM,4032
sqlalchemy/dialects/mysql/pyodbc.py,sha256=mkOvumrxpmAi6noZlkaTVKz2F7G5vLh2vx0cZSn9VTA,4288
sqlalchemy/dialects/mysql/reflection.py,sha256=ak6E-eCP9346ixnILYNJcrRYblWbIT0sjXf4EqmfBsY,22556
sqlalchemy/dialects/mysql/reserved_words.py,sha256=DsPHsW3vwOrvU7bv3Nbfact2Z_jyZ9xUTT-mdeQvqxo,9145
sqlalchemy/dialects/mysql/types.py,sha256=i8DpRkOL1QhPErZ25AmCQOuFLciWhdjNL3I0CeHEhdY,24258
sqlalchemy/dialects/oracle/__init__.py,sha256=pjk1aWi9XFCAHWNSJzSzmoIcL32-AkU_1J9IV4PtwpA,1318
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/oracle/base.py,sha256=u55_R9NrCRijud7ioHMxT-r0MSW0gMFjOwbrDdPgFsc,118036
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=L0GvcB6xb0-zyv5dx3bpQCeptp0KSqH6g9FUQ4y-d-g,55108
sqlalchemy/dialects/oracle/dictionary.py,sha256=iUoyFEFM8z0sfVWR2n_nnre14kaQkV_syKO0R5Dos4M,19487
sqlalchemy/dialects/oracle/oracledb.py,sha256=_-fUQ94xai80B7v9WLVGoGDIv8u54nVspBdyGEyI76g,3457
sqlalchemy/dialects/oracle/provision.py,sha256=5cvIc3yTWxz4AIRYxcesbRJ1Ft-zT9GauQ911yPnN2o,8055
sqlalchemy/dialects/oracle/types.py,sha256=TeOhUW5W9qZC8SaJ-9b3u6OvOPOarNq4MmCQ7l3wWX0,8204
sqlalchemy/dialects/postgresql/__init__.py,sha256=bZEPsLbRtB7s6TMQAHCIzKBgkxUa3eDXvCkeARua37E,3734
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,,
sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=U3aWzbKD3VOj6Z6r-4IsIQmtjGGIB4RDZH6NXfd8Xz0,5655
sqlalchemy/dialects/postgresql/array.py,sha256=tLyU9GDAeIypNhjTuFQUYbaTeijVM1VVJS6UdzzXXn4,13682
sqlalchemy/dialects/postgresql/asyncpg.py,sha256=XNaoOZ5Da4-jUTaES1zEOTEW3WG8UKyVCoIS3LsFhzE,39967
sqlalchemy/dialects/postgresql/base.py,sha256=DGhaquFJWDQL7wIvQ2EE57LxD7zGR06BKQxvNZHFLgY,175634
sqlalchemy/dialects/postgresql/dml.py,sha256=_He69efdpDA5gGmBsE7Lo4ViSi3QnR38BiFmrR1tw6k,11203
sqlalchemy/dialects/postgresql/ext.py,sha256=oPP22Pq-n2lMmQ8ahifYmsmzRhSiSv1RV-xrTT0gycw,16253
sqlalchemy/dialects/postgresql/hstore.py,sha256=q5x0npbAMI8cdRFGTMwLoWFj9P1G9DUkw5OEUCfTXpI,11532
sqlalchemy/dialects/postgresql/json.py,sha256=panGtnEbcirQDy4yR2huWydFqa_Kmv8xhpLyf-SSRWE,11203
sqlalchemy/dialects/postgresql/named_types.py,sha256=zNoHsP3nVq5xxA7SOQ6LLDwYZEHFciZ-nDjw_I9f_G0,17092
sqlalchemy/dialects/postgresql/operators.py,sha256=MB40xq1124OnhUzkvtbnTmxEiey0VxMOYyznF96wwhI,2799
sqlalchemy/dialects/postgresql/pg8000.py,sha256=w6pJ3LaIKWmnwvB0Pr1aTJX5OKNtG5RNClVfkE019vU,18620
sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=0lLnIgxfCrqkx_LNijMxo0trNLsodcd8KwretZIj4uM,8875
sqlalchemy/dialects/postgresql/provision.py,sha256=oxyAzs8_PhuK0ChivXC3l2Nldih3_HKffvGsZqD8XWI,5509
sqlalchemy/dialects/postgresql/psycopg.py,sha256=YMubzQHMYN1By8QJScIPb_PwNiACv6srddQ6nX6WltQ,22238
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=3Xci4bTA2BvhrZAQa727uFWdaXEZmvfD-Z-upE3NyQE,31592
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=2EOuDwBetfvelcPoTzSwOHe6X8lTwaYH7znNzXJt9eM,1739
sqlalchemy/dialects/postgresql/ranges.py,sha256=yHB1BRlUreQPZB3VEn0KMMLf02zjf5jjYdmg4N4S2Sw,30220
sqlalchemy/dialects/postgresql/types.py,sha256=l24rs8_nK4vqLyQC0aUkf4S7ecw6T_7Pgq50Icc5CBs,7292
sqlalchemy/dialects/sqlite/__init__.py,sha256=wnZ9vtfm0QXmth1jiGiubFgRiKxIoQoNthb1bp4FhCs,1173
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,,
sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=GZJioZLot0D5CQ6ovPQoqv2iV8FAFm3G75lEFCzopoE,12296
sqlalchemy/dialects/sqlite/base.py,sha256=YYEB5BeuemLC3FAR7EB8vA0zoUOwHTKoF_srvnAStps,96785
sqlalchemy/dialects/sqlite/dml.py,sha256=PYESBj8Ip7bGs_Fi7QjbWLXLnU9a-SbP96JZiUoZNHg,8434
sqlalchemy/dialects/sqlite/json.py,sha256=XFPwSdNx0DxDfxDZn7rmGGqsAgL4vpJbjjGaA73WruQ,2533
sqlalchemy/dialects/sqlite/provision.py,sha256=O4JDoybdb2RBblXErEVPE2P_5xHab927BQItJa203zU,5383
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=_JuOCoic--ehAGkCgnwUUKKTs6xYoBGag4Y_WkQUDwU,5347
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=xBg6DKqvml5cCGxVSAQxR1dcMvso8q4uyXs2m4WLzz0,27891
sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239
sqlalchemy/engine/__init__.py,sha256=fJCAl5P7JH9iwjuWo72_3LOIzWWhTnvXqzpAmm_T0fY,2818
sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/base.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/create.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/default.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/events.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/result.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/row.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/url.cpython-312.pyc,,
sqlalchemy/engine/__pycache__/util.cpython-312.pyc,,
sqlalchemy/engine/_py_processors.py,sha256=RSVKm9YppSBDSCEi8xvbZdRCP9EsCYfbyEg9iDCMCiI,3744
sqlalchemy/engine/_py_row.py,sha256=Zdta0JGa7V2aV04L7nzXUEp-H1gpresKyBlneQu60pk,3549
sqlalchemy/engine/_py_util.py,sha256=5m3MZbEqnUwP5kK_ghisFpzcXgBwSxTSkBEFB6afiD8,2245
sqlalchemy/engine/base.py,sha256=RbIfWZ1Otyb4VzMYjDpK5BiDIE8QZwa4vQgRX0yCa28,122246
sqlalchemy/engine/characteristics.py,sha256=YvMgrUVAt3wsSiQ0K8l44yBjFlMK3MGajxhg50t5yFM,2344
sqlalchemy/engine/create.py,sha256=8372TLpy4FOAIZ9WmuNkx1v9DPgwpoCAH9P7LNXZCwY,32629
sqlalchemy/engine/cursor.py,sha256=6e1Tp63r0Kt-P4pEaYR7wUew2aClTdKAEI-FoAAxJxE,74405
sqlalchemy/engine/default.py,sha256=bi--ytxYJ0EtsCudl38owGtytnwTHX-PjlsYTFe8LpA,84065
sqlalchemy/engine/events.py,sha256=PQyc_sbmqks6pqyN7xitO658KdKzzJWfW1TKYwEd5vo,37392
sqlalchemy/engine/interfaces.py,sha256=pAFYR15f1Z_-qdzTYI4mAm8IYbD6maLBKbG3pBaJ8Us,112824
sqlalchemy/engine/mock.py,sha256=ki4ud7YrUrzP2katdkxlJGFUKB2kS7cZZAHK5xWsNF8,4179
sqlalchemy/engine/processors.py,sha256=ENN6XwndxJPW-aXPu_3NzAZsy5SvNznHoa1Qn29ERAw,2383
sqlalchemy/engine/reflection.py,sha256=2aakNheQJNMUXZbhY8s1NtqGoGWTxM2THkJlMMfiX_s,75125
sqlalchemy/engine/result.py,sha256=shRAsboHPTvKR38ryGgC4KLcUeVTbABSlWzAfOUKVZs,77841
sqlalchemy/engine/row.py,sha256=doiXKaUI6s6OkfqPIwNyTPLllxJfR8HYgEI8ve9VYe0,11955
sqlalchemy/engine/strategies.py,sha256=HjCj_FHQOgkkhhtnVmcOEuHI_cftNo3P0hN5zkhZvDc,442
sqlalchemy/engine/url.py,sha256=_WNE7ia0JIPRc1PLY_jSA3F7bB5kp1gzuzkc5eoKviA,30694
sqlalchemy/engine/util.py,sha256=3-ENI9S-3KLWr0GW27uWQfsvCJwMBGTKbykkKPUgiAE,5667
sqlalchemy/event/__init__.py,sha256=CSBMp0yu5joTC6tWvx40B4p87N7oGKxC-ZLx2ULKUnQ,997
sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/event/__pycache__/api.cpython-312.pyc,,
sqlalchemy/event/__pycache__/attr.cpython-312.pyc,,
sqlalchemy/event/__pycache__/base.cpython-312.pyc,,
sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,,
sqlalchemy/event/__pycache__/registry.cpython-312.pyc,,
sqlalchemy/event/api.py,sha256=nQAvPK1jrLpmu8aKCUtc-vYWcIuG-1FgAtp3GRkfIiI,8227
sqlalchemy/event/attr.py,sha256=NMe_sPQTju2PE-f68C8TcKJGW-Gxyi1CLXumAmE368Y,20438
sqlalchemy/event/base.py,sha256=Cr_PNJlCYJSU3rtT8DkplyjBRb-E2Wa3OAeK9woFJkk,14980
sqlalchemy/event/legacy.py,sha256=OpPqE64xk1OYjLW1scvc6iijhoa5GZJt5f7-beWhgOc,8211
sqlalchemy/event/registry.py,sha256=Zig9q2Galo8kO2aqr7a2rNAhmIkdJ-ntHSEcM5MfSgw,10833
sqlalchemy/events.py,sha256=pRcPKKsPQHGPH_pvTtKRmzuEIy-QHCtkUiZl4MUbxKs,536
sqlalchemy/exc.py,sha256=4SMKOJtz7_SWt5vskCSeXSi4ZlFyL4jh53Q8sk4-ODQ,24011
sqlalchemy/ext/__init__.py,sha256=w4h7EpXjKPr0LD4yHa0pDCfrvleU3rrX7mgyb8RuDYQ,322
sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,,
sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,,
sqlalchemy/ext/associationproxy.py,sha256=5voNXWIJYGt6c8mwuSA6alm3SmEHOZ-CVK8ikgfzk8s,65960
sqlalchemy/ext/asyncio/__init__.py,sha256=iG_0TmBO1pCB316WS-p17AImwqRtUoaKo7UphYZ7bYw,1317
sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,,
sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,,
sqlalchemy/ext/asyncio/base.py,sha256=PXF4YqfRi2-mADAtaL2_-Uv7CzoBVojPbzyA5phJ9To,8959
sqlalchemy/ext/asyncio/engine.py,sha256=h4pe3ixuX6YfI97B5QWo2V4_CCCnOvM_EHPZhX19Mgc,47796
sqlalchemy/ext/asyncio/exc.py,sha256=1hCdOKzvSryc_YE4jgj0l9JASOmZXutdzShEYPiLbGI,639
sqlalchemy/ext/asyncio/result.py,sha256=zETerVB53gql1DL6tkO_JiqeU-m1OM-8kX0ULxmoL_I,30554
sqlalchemy/ext/asyncio/scoping.py,sha256=cBNluB7n_lwdAAo6pySbvNRqPN7UBzwQHZ6XhRDyWgA,52685
sqlalchemy/ext/asyncio/session.py,sha256=yWwhI5i_yVWjykxmxkcP3-xmw3UpoGYNhHZL8sYXQMA,62998
sqlalchemy/ext/automap.py,sha256=7p13-VpN0MOM525r7pmEnftedya9l5G-Ei_cFXZfpTc,61431
sqlalchemy/ext/baked.py,sha256=R8ZAxiVN6eH50AJu0O3TtFXNE1tnRkMlSj3AvkcWFhY,17818
sqlalchemy/ext/compiler.py,sha256=h7eR0NcPJ4F_k8YGRP3R9YX75Y9pgiVxoCjRyvceF7g,20391
sqlalchemy/ext/declarative/__init__.py,sha256=VJu8S1efxil20W48fJlpDn6gHorOudn5p3-lF72WcJ8,1818
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,,
sqlalchemy/ext/declarative/extensions.py,sha256=vwZjudPFA_mao1U04-RZCaU_tvPMBgQa5OTmSI7K7SU,19547
sqlalchemy/ext/horizontal_shard.py,sha256=eh14W8QWHYH22PL1l5qF_ad9Fyh1WAFjKi_vNfsme94,16766
sqlalchemy/ext/hybrid.py,sha256=98D72WBmlileYBtEKMSNF9l-bwRavThSV8-LyB2gjo0,52499
sqlalchemy/ext/indexable.py,sha256=RkG9BKwil-TqDjVBM14ML9c-geUrHxtRKpYkSJEwGHA,11028
sqlalchemy/ext/instrumentation.py,sha256=rjjSbTGilYeGLdyEWV932TfTaGxiVP44_RajinANk54,15723
sqlalchemy/ext/mutable.py,sha256=d3Pp8PcAVN4pHN9rhc1ReXBWe0Q70Q5S1klFoYGyDPA,37393
sqlalchemy/ext/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,,
sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,,
sqlalchemy/ext/mypy/apply.py,sha256=uUES4grydYtKykLKlxzJeBXeGe8kfWou9_rzEyEkfp0,10503
sqlalchemy/ext/mypy/decl_class.py,sha256=Ls2Efh4kEhle6Z4VMz0GRBgGQTYs2fHr5b4DfuDj44c,17377
sqlalchemy/ext/mypy/infer.py,sha256=si720RW6iGxMRZNP5tcaIxA1_ehFp215TzxVXaLjglU,19364
sqlalchemy/ext/mypy/names.py,sha256=tch4f5fDmdv4AWWFzXgGZdCpxmae59XRPT02KyMvrEI,10625
sqlalchemy/ext/mypy/plugin.py,sha256=fLXDukvZqbJ0JJCOoyZAuOniYZ_F1YT-l9gKppu8SEs,9750
sqlalchemy/ext/mypy/util.py,sha256=TlEQq4bcs8ARLL3PoFS8Qw6oYFeMqcGnWTeJ7NsPPFk,9408
sqlalchemy/ext/orderinglist.py,sha256=8Vcg7UUkLg-QbYAbLVDSqu-5REkR6L-FLLhCYsHYxCQ,14384
sqlalchemy/ext/serializer.py,sha256=ox6dbMOBmFR0H2RQFt17mcYBOGKgn1cNVFfqY8-jpgQ,6178
sqlalchemy/future/__init__.py,sha256=79DZx3v7TQZpkS_qThlmuCOm1a9UK2ObNZhyMmjfNB0,516
sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/future/__pycache__/engine.cpython-312.pyc,,
sqlalchemy/future/engine.py,sha256=6uOpOedIqiT1-3qJSJIlv9_raMJU8NTkhQwN_Ngg8kI,499
sqlalchemy/inspection.py,sha256=i3aR-IV101YU8D9TA8Pxb2wi08QZuJ34sMy6L5M__rY,5145
sqlalchemy/log.py,sha256=aSlZ8DFHkOuI-AMmaOUUYtS9zGPadi_7tAo98QpUOiY,8634
sqlalchemy/orm/__init__.py,sha256=cBn0aPWyDFY4ya-cHRshQBcuThk1smTUCTrlp6LHdlE,8463
sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/base.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/context.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/events.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/query.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/session.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/state.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/util.cpython-312.pyc,,
sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,,
sqlalchemy/orm/_orm_constructors.py,sha256=_7_GY6qw2sA-GG_WXLz1GOO-0qC-SCBeA43GhVuS2Qw,99803
sqlalchemy/orm/_typing.py,sha256=oRUJVAGpU3_DhSkIb1anXgneweVIARjB51HlPhMNfcM,5015
sqlalchemy/orm/attributes.py,sha256=NFhYheqqu2VcXmKTdcvQKiRR_6qo0rHLK7nda7rpviA,92578
sqlalchemy/orm/base.py,sha256=iZXsygk4fn8wd7wx1iXn_PfnGDY7d41YRfS0mC_q5vE,27700
sqlalchemy/orm/bulk_persistence.py,sha256=S9VK5a6GSqnw3z7O5UG5OOnc9WxzmS_ooDkA5JmCIsY,69878
sqlalchemy/orm/clsregistry.py,sha256=4J-kKshmLOEyx3VBqREm2k_XY0cer4zwUoHJT3n5Xmw,17949
sqlalchemy/orm/collections.py,sha256=0AZFr9us9MiHo_Xcyi7DUsN02jSBERUOd-jIK8qQ1DA,52159
sqlalchemy/orm/context.py,sha256=VyJl1ZJ5OnJUACKlM-bPLyyoqu4tyaKKdxeC-QF4EuU,111698
sqlalchemy/orm/decl_api.py,sha256=a2Cyvjh6j5BlXJQ2i0jpQx7xkeI_6xo5MMxr0d2ndQY,63589
sqlalchemy/orm/decl_base.py,sha256=g9xW9G-n9iStMI0i3i-9Rt4LDRW8--3iCCRPlWF6Cko,81660
sqlalchemy/orm/dependency.py,sha256=g3R_1H_OGzagXFeen3Irm3c1lO3yeXGdGa0muUZgZAk,47583
sqlalchemy/orm/descriptor_props.py,sha256=SdrfVu05zhWLGe_DnBlgbU6e5sWkkfBTirH9Nrr1MLk,37176
sqlalchemy/orm/dynamic.py,sha256=pYlMIrpp80Ex4KByqdyhx0x0kIrl_cIADwkeVxvYu4s,9798
sqlalchemy/orm/evaluator.py,sha256=jPjVrP7XbVOG6aXTCBREq0rF3oNHLqB4XAT-gt_cpaA,11925
sqlalchemy/orm/events.py,sha256=fGnUHwDTV9FTiifB2mmIJispwPbIT4mZongRJD7uiw4,127258
sqlalchemy/orm/exc.py,sha256=A3wvZVs5sC5XCef4LoTUBG-UfhmliFpU9rYMdS2t_To,7356
sqlalchemy/orm/identity.py,sha256=gRiuQSrurHGEAJXH9QGYioXL49Im5EGcYQ-IKUEpHmQ,9249
sqlalchemy/orm/instrumentation.py,sha256=o1mTv5gCgl9d-SRvEXXjl8rzl8uBasRL3bpDgWg9P58,24337
sqlalchemy/orm/interfaces.py,sha256=RW7bBXGWtZHY2wXFOSqtvYm6UDl7yHZUyRX_6Yd3GfQ,48395
sqlalchemy/orm/loading.py,sha256=F1ZEHTPBglmznST2nGj_0ARccoFgTyaOOwjcqpYeuvM,57366
sqlalchemy/orm/mapped_collection.py,sha256=ZgYHaF37yo6-gZ7Da1Gg25rMgG2GynAy-RJoDhljV5g,19698
sqlalchemy/orm/mapper.py,sha256=kyq4pBkTvvEqlW4H4XK_ktP1sOiALNAycgvF5f-xtqw,170969
sqlalchemy/orm/path_registry.py,sha256=olyutgn0uNB7Wi32YNQx9ZHV6sUgV3TbyGplfSxfZ6g,25938
sqlalchemy/orm/persistence.py,sha256=qr1jUgo-NZ0tLa5eIis2271QDt4KNJwYlYU_9CaKNhQ,60545
sqlalchemy/orm/properties.py,sha256=dt1Gy06pbRY6zgm4QGR9nU6z2WCyoTZWBJYKpUhLq_c,29095
sqlalchemy/orm/query.py,sha256=VBSD0k15xU_XykggvLGAwGdwNglBAoBKbOk8qAoMKdI,117714
sqlalchemy/orm/relationships.py,sha256=wrHyICb8A5qPoyxf-nITQVJ13kCNr2MedDqEY8QMSt8,127816
sqlalchemy/orm/scoping.py,sha256=75iPEWDFhPcIXgl8EUd_sPTCL6punfegEaTRE5mP3e8,78835
sqlalchemy/orm/session.py,sha256=TeBcZNdY4HWQFdXNCIqbsQTtkvfJkBweMzvA9p3BiPA,193279
sqlalchemy/orm/state.py,sha256=EaWkVNWHaDeJ_FZGXHakSamUk51BXmtMWLGdFhlJmh8,37536
sqlalchemy/orm/state_changes.py,sha256=pqkjSDOR6H5BufMKdzFUIatDp3DY90SovOJiJ1k6Ayw,6815
sqlalchemy/orm/strategies.py,sha256=V0o-1kB1IVTxhOGqGtRyjddZqAbPdsl_h-k0N3MKCGo,114052
sqlalchemy/orm/strategy_options.py,sha256=EmgH28uMQhwwBCDVcXmywLk_Q8AbpnK02seMsMV4nmc,84102
sqlalchemy/orm/sync.py,sha256=5Nt_OqP4IfhAtHwFRar4dw-YjLENRLvp4d3jDC4wpnw,5749
sqlalchemy/orm/unitofwork.py,sha256=Wk5YZocBbxe4m1wU2aFQ7gY1Cp5CROi13kDEM1iOSz4,27033
sqlalchemy/orm/util.py,sha256=7hCRYbQjqhWJTkrPf_NXY9zF_18VWTpyguu-nfYfc6c,80340
sqlalchemy/orm/writeonly.py,sha256=WCPXCAwHqVCfhVWXQEFCP3OocIiHgqNJ5KnuJwSgGq4,22329
sqlalchemy/pool/__init__.py,sha256=CIv4b6ctueY7w3sML_LxyLKAdl59esYOhz3O7W5w7WE,1815
sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/base.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/events.cpython-312.pyc,,
sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,,
sqlalchemy/pool/base.py,sha256=wuwKIak5d_4-TqKI2RFN8OYMEyOvV0djnoSVR8gbxAQ,52249
sqlalchemy/pool/events.py,sha256=IcWfORKbHM69Z9FdPJlXI7-NIhQrR9O_lg59tiUdTRU,13148
sqlalchemy/pool/impl.py,sha256=vU0n82a7uxdE34p3hU7cvUDA5QDy9MkIv1COT4kYFP8,17724
sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/schema.py,sha256=mt74CGCBtfv_qI1_6zzNFMexYGyWDj2Jkh-XdH4kEWI,3194
sqlalchemy/sql/__init__.py,sha256=jAQx9rwhyPhoSjntM1BZSElJiMRmLowGThJVDGvExSU,5820
sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/base.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/events.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/util.cpython-312.pyc,,
sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,,
sqlalchemy/sql/_dml_constructors.py,sha256=hoNyINY3FNi1ZQajR6lbcRN7oYsNghM1wuzzVWxIv3c,3867
sqlalchemy/sql/_elements_constructors.py,sha256=-qksx59Gqhmzxo1xByPtZZboNvL8uYcCN14pjHYHxL8,62914
sqlalchemy/sql/_orm_types.py,sha256=_vR3_HQYgZR_of6_ZpTQByie2gaVScxQjVAVWAP3Ztg,620
sqlalchemy/sql/_py_util.py,sha256=iiwgX3dQhOjdB5-10jtgHPIdibUqGk49bC1qdZMBpYI,2173
sqlalchemy/sql/_selectable_constructors.py,sha256=RDqgejqiUuU12Be1jBpMIx_YdJho8fhKfnMoJLPFTFE,18812
sqlalchemy/sql/_typing.py,sha256=C8kNZQ3TIpM-Q12Of3tTaESB1UxIfRME_lXouqgwMT8,12252
sqlalchemy/sql/annotation.py,sha256=pTNidcQatCar6H1I9YAoPP1e6sOewaJ15B7_-7ykZOE,18271
sqlalchemy/sql/base.py,sha256=dVvZoPoa3pb6iuwTU4QoCvVWQPyHZthaekl5J2zV_SU,73928
sqlalchemy/sql/cache_key.py,sha256=Dl163qHjTkMCa5LTipZud8X3w0d8DvdIvGvv4AqriHE,32823
sqlalchemy/sql/coercions.py,sha256=ju8xEi7b9G_GzxaQ6Nwu0cFIWFZ--ottIVfdiuhHY7Y,40553
sqlalchemy/sql/compiler.py,sha256=9Wx423H72Yq7NHR8cmMAH6GpMCJmghs1L85YJqs_Lng,268763
sqlalchemy/sql/crud.py,sha256=nyAPlmvuyWxMqSBdWPffC5P3CGXTQKK0bJoDbNgB3iQ,56457
sqlalchemy/sql/ddl.py,sha256=XuUhulJLvvPjU4nYD6N42QLg8rEgquD6Jwn_yIHZejk,45542
sqlalchemy/sql/default_comparator.py,sha256=SE0OaK1BlY0RinQ21ZXJOUGkO00oGv6GMMmAH-4iNTQ,16663
sqlalchemy/sql/dml.py,sha256=eftbzdFJgMk7NV0BHKfK4dQ2R7XsyyJn6fCgYFJ0KNQ,65728
sqlalchemy/sql/elements.py,sha256=dsNa2K57RygsGoaWuTMPp2QQ6SU3uZXSMW6CLGBbcIY,171208
sqlalchemy/sql/events.py,sha256=xe3vJ6pQJau3dJWBAY0zU7Lz52UKuMrpLycriLm3AWA,18301
sqlalchemy/sql/expression.py,sha256=baMnCH04jeE8E3tA2TovXlsREocA2j3fdHKnzOB8H4U,7586
sqlalchemy/sql/functions.py,sha256=AcI_KstJxeLw6rEXx6QnIgR2rq4Ru6RXMbq4EIIUURA,55319
sqlalchemy/sql/lambdas.py,sha256=EfDdUBi5cSmkjz8pQCSRo858UWQCFNZxXkM-1qS0CgU,49281
sqlalchemy/sql/naming.py,sha256=l8udFP2wvXLgehIB0uF2KXwpkXSVSREDk6fLCH9F-XY,6865
sqlalchemy/sql/operators.py,sha256=BYATjkBQLJAmwHAlGUSV-dv9RLtGw_ziAvFbKDrN4YU,76107
sqlalchemy/sql/roles.py,sha256=71zm_xpRkUdnu-WzG6lxQVnFHwvUjf6X6e3kRIkbzAs,7686
sqlalchemy/sql/schema.py,sha256=TOBTbcRY6ehosJEcpYn2NX0_UGZP9lfFs-o8lJVc5tI,228104
sqlalchemy/sql/selectable.py,sha256=9dO2yhN83zjna7nPjOE1hcvGyJGjc_lj5SAz7SP5CBQ,233041
sqlalchemy/sql/sqltypes.py,sha256=_0FpFLH0AFueb3TIB5Vcx9nXWDNj31XFQTP0u8OXnSo,126540
sqlalchemy/sql/traversals.py,sha256=7b98JSeLxqecmGHhhLXT_2M4QMke6W-xCci5RXndhxI,33521
sqlalchemy/sql/type_api.py,sha256=D9Kq-ppwZvlNmxaHqvVmM8IVg4n6_erzJpVioye9WKE,83823
sqlalchemy/sql/util.py,sha256=lBEAf_-eRepTErOBCp1PbEMZDYdJqAiK1GemQtgojYo,48175
sqlalchemy/sql/visitors.py,sha256=KD1qOYm6RdftCufVGB8q6jFTIZIQKS3zPCg78cVV0mQ,36427
sqlalchemy/testing/__init__.py,sha256=9M2SMxBBLJ8xLUWXNCWDzkcvOqFznWcJzrSd712vATU,3126
sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/config.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/util.cpython-312.pyc,,
sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,,
sqlalchemy/testing/assertions.py,sha256=lNNZ-gfF4TDRXmB7hZDdch7JYZRb_qWGeqWDFKtopx0,31439
sqlalchemy/testing/assertsql.py,sha256=EIVk3i5qjiSI63c1ikTPoGhulZl88SSeOS2VNo1LJvM,16817
sqlalchemy/testing/asyncio.py,sha256=cAw68tzu3h5wjdIKfOqhFATcbMb38XeK0ThjIalUHuQ,3728
sqlalchemy/testing/config.py,sha256=MZOWz7wqzc1pbwHWSAR0RJkt2C-SD6ox-nYY7VHdi_U,12030
sqlalchemy/testing/engines.py,sha256=w5-0FbanItRsOt6x4n7wM_OnToCzJnrvZZ2hk5Yzng8,13355
sqlalchemy/testing/entities.py,sha256=rysywsnjXHlIIC-uv0L7-fLmTAuNpHJvcSd1HeAdY5M,3354
sqlalchemy/testing/exclusions.py,sha256=uoYLEwyNOK1eR8rpfOZ2Q3dxgY0akM-RtsIFML-FPrY,12444
sqlalchemy/testing/fixtures/__init__.py,sha256=9snVns5A7g28LqC6gqQuO4xRBoJzdnf068GQ6Cae75I,1198
sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,,
sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,,
sqlalchemy/testing/fixtures/base.py,sha256=OayRr25soCqj1_yc665D5XbWWzFCm7Xl9Txtps953p4,12256
sqlalchemy/testing/fixtures/mypy.py,sha256=7fWVZzYzNjqmLIoFa-MmXSGDPS3eZYFXlH-WxaxBDDY,11845
sqlalchemy/testing/fixtures/orm.py,sha256=x27qjpK54JETATcYuiphtW-HXRy8ej8h3aCDkeQXPfY,6095
sqlalchemy/testing/fixtures/sql.py,sha256=Q7Qq0n4qTT681nWt5DqjThopgjv5BB2KmSmrmAxUqHM,15704
sqlalchemy/testing/pickleable.py,sha256=B9dXGF7E2PywB67SngHPjSMIBDTFhyAV4rkDUcyMulk,2833
sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,,
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,,
sqlalchemy/testing/plugin/bootstrap.py,sha256=GrBB27KbswjE3Tt-zJlj6uSqGh9N-_CXkonnJSSBz84,1437
sqlalchemy/testing/plugin/plugin_base.py,sha256=4SizjghFdDddt5o5gQ16Nw0bJHrtuBa4smxJcea-ti8,21573
sqlalchemy/testing/plugin/pytestplugin.py,sha256=yh4PP406O0TwPMDzpJHpcNdU2WHXCLYI10F3oOLePjE,27295
sqlalchemy/testing/profiling.py,sha256=HPjYvRLT1nD90FCZ7AA8j9ygkMtf1SGA47Xze2QPueo,10148
sqlalchemy/testing/provision.py,sha256=w4F_ceGHPpWHUeh6cVcE5ktCC-ISrGc2yOSnXauOd5U,14200
sqlalchemy/testing/requirements.py,sha256=gkviA8f5p4qdoDwAK791I4oGvnEqlm0ZZwJZpJzobFY,51393
sqlalchemy/testing/schema.py,sha256=OSfMoIJ7ORbevGkeJdrKcTrQ0s7wXebuCU08mC1Y9jA,6513
sqlalchemy/testing/suite/__init__.py,sha256=_firVc2uS3TMZ3vH2baQzNb17ubM78RHtb9kniSybmk,476
sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,,
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,,
sqlalchemy/testing/suite/test_cte.py,sha256=O5idVeBnHm9zdiG3tuCBUn4hYU_TA63-6LNnRygr8g0,6205
sqlalchemy/testing/suite/test_ddl.py,sha256=xWimTjggpTe3S1Xfmt_IPofTXkUUcKuVSVCIfIyGMbA,11785
sqlalchemy/testing/suite/test_deprecations.py,sha256=XI8ZU1NxC-6uvPDImaaq9O7Ov6MF5gmy-yk3TfesLAo,5082
sqlalchemy/testing/suite/test_dialect.py,sha256=HUpHZb7pnHbsoRpDLONpsCO_oWhBgjglU9pBO-EOUw4,22673
sqlalchemy/testing/suite/test_insert.py,sha256=Wm_pW0qqUNV1Fs7mXoxtmaTHMQGmaVDgDsYgZs1jlxM,18308
sqlalchemy/testing/suite/test_reflection.py,sha256=Nd4Ao_J3Sr-VeAeWbUe3gs6STPvik9DC37WkyJc-PVg,106205
sqlalchemy/testing/suite/test_results.py,sha256=Hd6R4jhBNNQSp0xGa8wwTgpw-XUrCEZ3dWXpoZ4_DKs,15687
sqlalchemy/testing/suite/test_rowcount.py,sha256=zhKVv0ibFSQmnE5luLwgHAn840zOJ6HxtkR3oL995cs,7652
sqlalchemy/testing/suite/test_select.py,sha256=QHsBX16EZpxlEZZLM0pMNcwayPU0dig39McKwiiith0,58325
sqlalchemy/testing/suite/test_sequence.py,sha256=c80CBWrU930GPnPfr9TCRbTTuITR7BpIactncLIj2XU,9672
sqlalchemy/testing/suite/test_types.py,sha256=QjV48MqR7dB8UVzt56UL2z7Nt28-IhywX3DKuQeLYsY,65429
sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=7obItCpFt4qlWaDqe25HWgQT6FoUhgz1W7_Xycfz9Xk,5887
sqlalchemy/testing/suite/test_update_delete.py,sha256=1hT0BTxB4SNipd6hnVlMnq25dLtQQoXov7z7UR0Sgi8,3658
sqlalchemy/testing/util.py,sha256=Wsu4GZgCW6wX9mmxfiffhDz1cZm3778OB3LtiWNgb3Y,14080
sqlalchemy/testing/warnings.py,sha256=pmfT33PF1q1PI7DdHOsup3LxHq1AC4-aYl1oL8HmrYo,1546
sqlalchemy/types.py,sha256=DgBpPaT-vtsn6_glx5wocrIhR2A1vy56SQNRY3NiPUw,3168
sqlalchemy/util/__init__.py,sha256=Bh0SkfkeCsz6-rbDmC41lAWOuCvKCiXVZthN2cWJEXk,8245
sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,,
sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,,
sqlalchemy/util/__pycache__/compat.cpython-312.pyc,,
sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,,
sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,,
sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,,
sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,,
sqlalchemy/util/__pycache__/queue.cpython-312.pyc,,
sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,,
sqlalchemy/util/__pycache__/topological.cpython-312.pyc,,
sqlalchemy/util/__pycache__/typing.cpython-312.pyc,,
sqlalchemy/util/_collections.py,sha256=FYqVQg3CaqiEd21OFN1pNCfFbQ8gvlchW_TMtihSFNE,20169
sqlalchemy/util/_concurrency_py3k.py,sha256=31vs1oXaLzeTRgmOXRrWToRQskWmJk-CBs3-JxSTcck,8223
sqlalchemy/util/_has_cy.py,sha256=XMkeqCDGmhkd0uuzpCdyELz7gOjHxyFQ1AIlc5NneoY,1229
sqlalchemy/util/_py_collections.py,sha256=cYjsYLCLBy5jdGBJATLJCmtfzr_AaJ-HKTUN8OdAzxY,16630
sqlalchemy/util/compat.py,sha256=FkeHnW9asJYJvNmxVltee8jQNwQSdVRdKJlVRRInJI4,9388
sqlalchemy/util/concurrency.py,sha256=ZxcQYOKy-GBsQkPmCrBO5MzMpqW3JZme2Hiyqpbt9uc,2284
sqlalchemy/util/deprecations.py,sha256=pr9DSAf1ECqDk7X7F6TNc1jrhOeFihL33uEb5Wt2_T0,11971
sqlalchemy/util/langhelpers.py,sha256=CQQP2Q9c68nL5mcWL-Q38-INrtoDHDnBmq7QhnWyEDM,64980
sqlalchemy/util/preloaded.py,sha256=KKNLJEqChDW1TNUsM_TzKu7JYEA3kkuh2N-quM_2_Y4,5905
sqlalchemy/util/queue.py,sha256=ITejs6KS4Hz_ojrss2oFeUO9MoIeR3qWmZQ8J7yyrNU,10205
sqlalchemy/util/tool_support.py,sha256=epm8MzDZpVmhE6LIjrjJrP8BUf12Wab2m28A9lGq95s,5969
sqlalchemy/util/topological.py,sha256=hjJWL3C_B7Rpv9s7jj7wcTckcZUSkxc6xRDhiN1xyec,3458
sqlalchemy/util/typing.py,sha256=ESYm4oQtt-SarN04YTXCgovXT8tFupMiPmuGCDCMEIc,15831

View File

@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.1.0)
+Generator: bdist_wheel (0.41.3)
 Root-Is-Purelib: false
-Tag: cp312-cp312-manylinux_2_17_x86_64
+Tag: cp312-cp312-manylinux2014_x86_64

View File

@@ -1,6 +1,6 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.1
 Name: aiofiles
-Version: 24.1.0
+Version: 23.2.1
 Summary: File support for asyncio.
 Project-URL: Changelog, https://github.com/Tinche/aiofiles#history
 Project-URL: Bug Tracker, https://github.com/Tinche/aiofiles/issues
@@ -13,15 +13,15 @@ Classifier: Development Status :: 5 - Production/Stable
 Classifier: Framework :: AsyncIO
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Python: >=3.8
+Requires-Python: >=3.7
 Description-Content-Type: text/markdown
 
 # aiofiles: file support for asyncio
@@ -135,8 +135,6 @@ several useful `os` functions that deal with files:
 - `listdir`
 - `scandir`
 - `access`
-- `getcwd`
-- `path.abspath`
 - `path.exists`
 - `path.isfile`
 - `path.isdir`
@@ -178,50 +176,25 @@ as desired. The return type also needs to be registered with the
 ```python
 aiofiles.threadpool.wrap.register(mock.MagicMock)(
-    lambda *args, **kwargs: aiofiles.threadpool.AsyncBufferedIOBase(*args, **kwargs)
-)
+    lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs))
 
 
 async def test_stuff():
-    write_data = 'data'
-    read_file_chunks = [
-        b'file chunks 1',
-        b'file chunks 2',
-        b'file chunks 3',
-        b'',
-    ]
-    file_chunks_iter = iter(read_file_chunks)
+    data = 'data'
+    mock_file = mock.MagicMock()
 
-    mock_file_stream = mock.MagicMock(
-        read=lambda *args, **kwargs: next(file_chunks_iter)
-    )
-
-    with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file_stream) as mock_open:
+    with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open:
         async with aiofiles.open('filename', 'w') as f:
-            await f.write(write_data)
-            assert f.read() == b'file chunks 1'
+            await f.write(data)
 
-        mock_file_stream.write.assert_called_once_with(write_data)
+        mock_file.write.assert_called_once_with(data)
 ```
 
 ### History
 
-#### 24.1.0 (2024-06-24)
-
-- Import `os.link` conditionally to fix importing on android.
-  [#175](https://github.com/Tinche/aiofiles/issues/175)
-- Remove spurious items from `aiofiles.os.__all__` when running on Windows.
-- Switch to more modern async idioms: Remove types.coroutine and make AiofilesContextManager an awaitable instead a coroutine.
-- Add `aiofiles.os.path.abspath` and `aiofiles.os.getcwd`.
-  [#174](https://github.com/Tinche/aiofiles/issues/181)
-- _aiofiles_ is now tested on Python 3.13 too.
-  [#184](https://github.com/Tinche/aiofiles/pull/184)
-- Dropped Python 3.7 support. If you require it, use version 23.2.1.
-
 #### 23.2.1 (2023-08-09)
 
 - Import `os.statvfs` conditionally to fix importing on non-UNIX systems.
   [#171](https://github.com/Tinche/aiofiles/issues/171) [#172](https://github.com/Tinche/aiofiles/pull/172)
 - aiofiles is now also tested on Windows.
 
 #### 23.2.0 (2023-08-09)
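
Editor's note: the README hunk above changes cosmetically between versions, but the mocking pattern it documents is the same on both sides. A minimal self-contained sketch of that pattern (assumes pytest with pytest-asyncio; the filename and data are placeholders):

```python
from unittest import mock

import aiofiles
import aiofiles.threadpool

# Teach aiofiles to wrap MagicMock instances as async buffered files,
# exactly as the README's register() call does.
aiofiles.threadpool.wrap.register(mock.MagicMock)(
    lambda *args, **kwargs: aiofiles.threadpool.AsyncBufferedIOBase(*args, **kwargs)
)


async def test_write_is_delegated():
    mock_file = mock.MagicMock()
    # sync_open is the blocking opener that aiofiles defers to a thread pool.
    with mock.patch("aiofiles.threadpool.sync_open", return_value=mock_file):
        async with aiofiles.open("filename", "w") as f:
            await f.write("data")
    mock_file.write.assert_called_once_with("data")
```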

View File

@@ -1,23 +1,23 @@
-aiofiles-24.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-aiofiles-24.1.0.dist-info/METADATA,sha256=CvUJx21XclgI1Lp5Bt_4AyJesRYg0xCSx4exJZVmaSA,10708
-aiofiles-24.1.0.dist-info/RECORD,,
-aiofiles-24.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-aiofiles-24.1.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
-aiofiles-24.1.0.dist-info/licenses/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325
-aiofiles-24.1.0.dist-info/licenses/NOTICE,sha256=EExY0dRQvWR0wJ2LZLwBgnM6YKw9jCU-M0zegpRSD_E,55
+aiofiles-23.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+aiofiles-23.2.1.dist-info/METADATA,sha256=cot28p_PNjdl_MK--l9Qu2e6QOv9OxdHrKbjLmYf9Uw,9673
+aiofiles-23.2.1.dist-info/RECORD,,
+aiofiles-23.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aiofiles-23.2.1.dist-info/WHEEL,sha256=KGYbc1zXlYddvwxnNty23BeaKzh7YuoSIvIMO4jEhvw,87
+aiofiles-23.2.1.dist-info/licenses/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325
+aiofiles-23.2.1.dist-info/licenses/NOTICE,sha256=EExY0dRQvWR0wJ2LZLwBgnM6YKw9jCU-M0zegpRSD_E,55
 aiofiles/__init__.py,sha256=1iAMJQyJtX3LGIS0AoFTJeO1aJ_RK2jpBSBhg0VoIrE,344
 aiofiles/__pycache__/__init__.cpython-312.pyc,,
 aiofiles/__pycache__/base.cpython-312.pyc,,
 aiofiles/__pycache__/os.cpython-312.pyc,,
 aiofiles/__pycache__/ospath.cpython-312.pyc,,
-aiofiles/base.py,sha256=zo0FgkCqZ5aosjvxqIvDf2t-RFg1Lc6X8P6rZ56p6fQ,1784
-aiofiles/os.py,sha256=0DrsG-eH4h7xRzglv9pIWsQuzqe7ZhVYw5FQS18fIys,1153
-aiofiles/ospath.py,sha256=WaYelz_k6ykAFRLStr4bqYIfCVQ-5GGzIqIizykbY2Q,794
+aiofiles/base.py,sha256=rZwA151Ji8XlBkzvDmcF1CgDTY2iKNuJMfvNlM0s0E0,2684
+aiofiles/os.py,sha256=zuFGaIyGCGUuFb7trFFEm6SLdCRqTFsSV0mY6SO8z3M,970
+aiofiles/ospath.py,sha256=zqG2VFzRb6yYiIOWipqsdgvZmoMTFvZmBdkxkAl1FT4,764
 aiofiles/tempfile/__init__.py,sha256=hFSNTOjOUv371Ozdfy6FIxeln46Nm3xOVh4ZR3Q94V0,10244
 aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc,,
 aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc,,
 aiofiles/tempfile/temptypes.py,sha256=ddEvNjMLVlr7WUILCe6ypTqw77yREeIonTk16Uw_NVs,2093
-aiofiles/threadpool/__init__.py,sha256=kt0hwwx3bLiYtnA1SORhW8mJ6z4W9Xr7MbY80UIJJrI,3133
+aiofiles/threadpool/__init__.py,sha256=c_aexl1t193iKdPZaolPEEbHDrQ0RrsH_HTAToMPQBo,3171
 aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc,,
 aiofiles/threadpool/__pycache__/binary.cpython-312.pyc,,
 aiofiles/threadpool/__pycache__/text.cpython-312.pyc,,

View File

@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.25.0
+Generator: hatchling 1.17.1
 Root-Is-Purelib: true
 Tag: py3-none-any

View File

@@ -1,6 +1,6 @@
 """Various base classes."""
 
-from collections.abc import Awaitable
-from contextlib import AbstractAsyncContextManager
+from types import coroutine
+from collections.abc import Coroutine
 from asyncio import get_running_loop
@@ -45,22 +45,66 @@ class AsyncIndirectBase(AsyncBase):
         pass  # discard writes
 
 
-class AiofilesContextManager(Awaitable, AbstractAsyncContextManager):
-    """An adjusted async context manager for aiofiles."""
-
+class _ContextManager(Coroutine):
     __slots__ = ("_coro", "_obj")
 
     def __init__(self, coro):
         self._coro = coro
         self._obj = None
 
+    def send(self, value):
+        return self._coro.send(value)
+
+    def throw(self, typ, val=None, tb=None):
+        if val is None:
+            return self._coro.throw(typ)
+        elif tb is None:
+            return self._coro.throw(typ, val)
+        else:
+            return self._coro.throw(typ, val, tb)
+
+    def close(self):
+        return self._coro.close()
+
+    @property
+    def gi_frame(self):
+        return self._coro.gi_frame
+
+    @property
+    def gi_running(self):
+        return self._coro.gi_running
+
+    @property
+    def gi_code(self):
+        return self._coro.gi_code
+
+    def __next__(self):
+        return self.send(None)
+
+    @coroutine
+    def __iter__(self):
+        resp = yield from self._coro
+        return resp
+
     def __await__(self):
-        if self._obj is None:
-            self._obj = yield from self._coro.__await__()
-        return self._obj
+        resp = yield from self._coro
+        return resp
+
+    async def __anext__(self):
+        resp = await self._coro
+        return resp
 
     async def __aenter__(self):
-        return await self
+        self._obj = await self._coro
+        return self._obj
+
+    async def __aexit__(self, exc_type, exc, tb):
+        self._obj.close()
+        self._obj = None
+
+
+class AiofilesContextManager(_ContextManager):
+    """An adjusted async context manager for aiofiles."""
 
     async def __aexit__(self, exc_type, exc_val, exc_tb):
         await get_running_loop().run_in_executor(
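
Editor's note: the two sides of this hunk solve the same problem differently. The newer code builds the object returned by `aiofiles.open()` from `Awaitable` plus `AbstractAsyncContextManager`; the older code proxies a generator coroutine. A generic sketch of the awaitable-context-manager idiom, with illustrative names rather than aiofiles API:

```python
from collections.abc import Awaitable
from contextlib import AbstractAsyncContextManager


class AwaitableCM(Awaitable, AbstractAsyncContextManager):
    """Supports both `obj = await acm` and `async with acm as obj:`."""

    __slots__ = ("_coro", "_obj")

    def __init__(self, coro):
        self._coro = coro
        self._obj = None

    def __await__(self):
        # Run the wrapped coroutine once and cache its result.
        if self._obj is None:
            self._obj = yield from self._coro.__await__()
        return self._obj

    async def __aenter__(self):
        return await self

    async def __aexit__(self, exc_type, exc, tb):
        self._obj = None  # a real implementation would release the resource


async def _acquire():
    return "resource"  # stand-in for an expensive async acquisition


# Usage (inside a running event loop):
#     res = await AwaitableCM(_acquire())
#     async with AwaitableCM(_acquire()) as res: ...
```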

View File

@@ -1,5 +1,4 @@
 """Async executor versions of file functions from the os module."""
-
 import os
 
 from . import ospath as path
@@ -8,6 +7,7 @@ from .ospath import wrap
 __all__ = [
     "path",
     "stat",
+    "statvfs",
     "rename",
     "renames",
     "replace",
@@ -17,20 +17,15 @@ __all__ = [
     "makedirs",
     "rmdir",
     "removedirs",
+    "link",
     "symlink",
     "readlink",
     "listdir",
     "scandir",
    "access",
+    "sendfile",
     "wrap",
-    "getcwd",
 ]
-
-if hasattr(os, "link"):
-    __all__ += ["link"]
-if hasattr(os, "sendfile"):
-    __all__ += ["sendfile"]
-if hasattr(os, "statvfs"):
-    __all__ += ["statvfs"]
 
 
 stat = wrap(os.stat)
@@ -43,15 +38,13 @@ mkdir = wrap(os.mkdir)
 makedirs = wrap(os.makedirs)
 rmdir = wrap(os.rmdir)
 removedirs = wrap(os.removedirs)
+link = wrap(os.link)
 symlink = wrap(os.symlink)
 readlink = wrap(os.readlink)
 listdir = wrap(os.listdir)
 scandir = wrap(os.scandir)
 access = wrap(os.access)
-getcwd = wrap(os.getcwd)
-
-if hasattr(os, "link"):
-    link = wrap(os.link)
 
 if hasattr(os, "sendfile"):
     sendfile = wrap(os.sendfile)
 if hasattr(os, "statvfs"):
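
Editor's note: the direction of this hunk is a downgrade, but the technique on both sides is the same: wrap blocking `os` functions so they run in an executor, and guard platform-specific ones with `hasattr`. A self-contained sketch; the `wrap` helper below is a simplified stand-in for the one defined in `aiofiles/ospath.py`:

```python
import asyncio
import os
from functools import partial, wraps


def wrap(func):
    # Simplified stand-in: run the blocking call in the default executor.
    @wraps(func)
    async def run(*args, loop=None, executor=None, **kwargs):
        if loop is None:
            loop = asyncio.get_running_loop()
        return await loop.run_in_executor(executor, partial(func, *args, **kwargs))

    return run


__all__ = ["stat"]
stat = wrap(os.stat)

# Only wrap and export what the current platform actually provides:
if hasattr(os, "sendfile"):  # absent on Windows
    sendfile = wrap(os.sendfile)
    __all__ += ["sendfile"]
if hasattr(os, "statvfs"):  # absent on non-UNIX systems
    statvfs = wrap(os.statvfs)
    __all__ += ["statvfs"]
```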

View File

@@ -1,5 +1,4 @@
 """Async executor versions of file functions from the os.path module."""
-
 import asyncio
 from functools import partial, wraps
 from os import path
@@ -27,4 +26,3 @@ getatime = wrap(path.getatime)
 getctime = wrap(path.getctime)
 samefile = wrap(path.samefile)
 sameopenfile = wrap(path.sameopenfile)
-abspath = wrap(path.abspath)
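
Editor's note: apart from `abspath` disappearing, calling code uses either version the same way; each call hands the blocking `os.path` function to a thread pool. A small usage sketch:

```python
import asyncio

import aiofiles.ospath


async def main():
    # Each awaited call runs the blocking os.path function in an executor.
    print(await aiofiles.ospath.exists("/tmp"))
    print(await aiofiles.ospath.getsize(__file__))


asyncio.run(main())
```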

View File

@@ -10,6 +10,7 @@ from io import (
     FileIO,
     TextIOBase,
 )
+from types import coroutine
 
 from ..base import AiofilesContextManager
 from .binary import (
@@ -62,7 +63,8 @@ def open(
     )
 
 
-async def _open(
+@coroutine
+def _open(
     file,
     mode="r",
     buffering=-1,
@@ -89,7 +91,7 @@ async def _open(
         closefd=closefd,
         opener=opener,
     )
-    f = await loop.run_in_executor(executor, cb)
+    f = yield from loop.run_in_executor(executor, cb)
 
     return wrap(f, loop=loop, executor=executor)
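
Editor's note: the `@coroutine` change reverts `_open` to a generator-based coroutine, the pre-`async def` spelling that can still be awaited. A minimal sketch of the idiom, not the aiofiles implementation:

```python
import asyncio
from functools import partial
from types import coroutine


@coroutine
def open_in_executor(file, mode="r"):
    """Generator-based coroutine: `yield from` drives the event loop."""
    loop = asyncio.get_running_loop()
    # run_in_executor returns a Future; yielding from it suspends the
    # generator until the blocking open() finishes in the thread pool.
    f = yield from loop.run_in_executor(None, partial(open, file, mode))
    return f


async def main():
    f = await open_in_executor(__file__)  # awaiting the generator works
    try:
        print(f.readline().rstrip())
    finally:
        f.close()


asyncio.run(main())
```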

View File

@@ -1,4 +1,4 @@
-Copyright 2009-2025 Michael Bayer.
+Copyright 2009-2023 Michael Bayer.
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
@@ -16,4 +16,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+SOFTWARE.

View File

@@ -1,10 +1,11 @@
-Metadata-Version: 2.4
+Metadata-Version: 2.1
 Name: alembic
-Version: 1.16.5
+Version: 1.12.1
 Summary: A database migration tool for SQLAlchemy.
-Author-email: Mike Bayer <mike_mp@zzzcomputing.com>
-License-Expression: MIT
-Project-URL: Homepage, https://alembic.sqlalchemy.org
+Home-page: https://alembic.sqlalchemy.org
+Author: Mike Bayer
+Author-email: mike_mp@zzzcomputing.com
+License: MIT
 Project-URL: Documentation, https://alembic.sqlalchemy.org/en/latest/
 Project-URL: Changelog, https://alembic.sqlalchemy.org/en/latest/changelog.html
 Project-URL: Source, https://github.com/sqlalchemy/alembic/
@@ -12,27 +13,27 @@ Project-URL: Issue Tracker, https://github.com/sqlalchemy/alembic/issues/
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Environment :: Console
+Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Database :: Front-Ends
-Requires-Python: >=3.9
+Requires-Python: >=3.7
 Description-Content-Type: text/x-rst
 License-File: LICENSE
-Requires-Dist: SQLAlchemy>=1.4.0
+Requires-Dist: SQLAlchemy >=1.3.0
 Requires-Dist: Mako
-Requires-Dist: typing-extensions>=4.12
-Requires-Dist: tomli; python_version < "3.11"
+Requires-Dist: typing-extensions >=4
+Requires-Dist: importlib-metadata ; python_version < "3.9"
+Requires-Dist: importlib-resources ; python_version < "3.9"
 Provides-Extra: tz
-Requires-Dist: tzdata; extra == "tz"
-Dynamic: license-file
+Requires-Dist: python-dateutil ; extra == 'tz'
 
 Alembic is a database migrations tool written by the author
 of `SQLAlchemy <http://www.sqlalchemy.org>`_. A migrations tool
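
Editor's note: to confirm which of these two dependency sets is actually installed in an environment, the stdlib can read the same metadata shown above. A quick sketch:

```python
from importlib.metadata import metadata, requires, version

print(version("alembic"))                      # e.g. "1.12.1"
print(metadata("alembic")["Requires-Python"])  # e.g. ">=3.7"
for req in requires("alembic") or []:
    print(req)                                 # e.g. "SQLAlchemy >=1.3.0"
```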

View File

@@ -0,0 +1,149 @@
../../../bin/alembic,sha256=kheZTewTBSd6rruOpyoj8QhFdGKiaj38MUFgBD5whig,238
alembic-1.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
alembic-1.12.1.dist-info/LICENSE,sha256=soUmiob0QW6vTQWyrjiAwVb3xZqPk1pAK8BW6vszrwg,1058
alembic-1.12.1.dist-info/METADATA,sha256=D9-LeKL0unLPg2JKmlFMB5NAxt9N9y-8oVEGOUHbQnU,7306
alembic-1.12.1.dist-info/RECORD,,
alembic-1.12.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
alembic-1.12.1.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
alembic-1.12.1.dist-info/entry_points.txt,sha256=aykM30soxwGN0pB7etLc1q0cHJbL9dy46RnK9VX4LLw,48
alembic-1.12.1.dist-info/top_level.txt,sha256=FwKWd5VsPFC8iQjpu1u9Cn-JnK3-V1RhUCmWqz1cl-s,8
alembic/__init__.py,sha256=gczqgDgBRw3aV70aNeH6WGu0WdASQf_YiChV12qCRRI,75
alembic/__main__.py,sha256=373m7-TBh72JqrSMYviGrxCHZo-cnweM8AGF8A22PmY,78
alembic/__pycache__/__init__.cpython-312.pyc,,
alembic/__pycache__/__main__.cpython-312.pyc,,
alembic/__pycache__/command.cpython-312.pyc,,
alembic/__pycache__/config.cpython-312.pyc,,
alembic/__pycache__/context.cpython-312.pyc,,
alembic/__pycache__/environment.cpython-312.pyc,,
alembic/__pycache__/migration.cpython-312.pyc,,
alembic/__pycache__/op.cpython-312.pyc,,
alembic/autogenerate/__init__.py,sha256=4IHgWH89pForRq-yCDZhGjjVtsfGX5ECWNPuUs8nGUk,351
alembic/autogenerate/__pycache__/__init__.cpython-312.pyc,,
alembic/autogenerate/__pycache__/api.cpython-312.pyc,,
alembic/autogenerate/__pycache__/compare.cpython-312.pyc,,
alembic/autogenerate/__pycache__/render.cpython-312.pyc,,
alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc,,
alembic/autogenerate/api.py,sha256=MNn0Xtmj44aMFjfiR0LMkbxOynHyiyaRBnrj5EkImm4,21967
alembic/autogenerate/compare.py,sha256=gSCjxrkQAl0rJD6o9Ln8wNxGVNU6FrWzKZYVkH5Tmac,47042
alembic/autogenerate/render.py,sha256=Fik2aPZEIxOlTCrBd0UiPxnX5SFG__CvfXqMWoJr6lw,34475
alembic/autogenerate/rewriter.py,sha256=Osba8GFVeqiX1ypGJW7Axt0ui2EROlaFtVZdMFbhzZ0,7384
alembic/command.py,sha256=ze4pYvKpB-FtF8rduY6F6n3XHqeA-15iXaaEDeNHVzI,21588
alembic/config.py,sha256=68e1nmYU5Nfh0bNRqRWUygSilDl1p0G_U1zZ8ifgmD8,21931
alembic/context.py,sha256=hK1AJOQXJ29Bhn276GYcosxeG7pC5aZRT5E8c4bMJ4Q,195
alembic/context.pyi,sha256=FLsT0be_vO_ozlC05EJkWR5olDPoTVq-7tgtoM5wSAw,31463
alembic/ddl/__init__.py,sha256=xXr1W6PePe0gCLwR42ude0E6iru9miUFc1fCeQN4YP8,137
alembic/ddl/__pycache__/__init__.cpython-312.pyc,,
alembic/ddl/__pycache__/base.cpython-312.pyc,,
alembic/ddl/__pycache__/impl.cpython-312.pyc,,
alembic/ddl/__pycache__/mssql.cpython-312.pyc,,
alembic/ddl/__pycache__/mysql.cpython-312.pyc,,
alembic/ddl/__pycache__/oracle.cpython-312.pyc,,
alembic/ddl/__pycache__/postgresql.cpython-312.pyc,,
alembic/ddl/__pycache__/sqlite.cpython-312.pyc,,
alembic/ddl/base.py,sha256=cCY3NldMRggrKd9bZ0mFRBE9GNDaAy0UJcM3ey4Utgw,9638
alembic/ddl/impl.py,sha256=Z3GpNM2KwBpfl1UCam1YsYbSd0mQzRigOKQhUCLIPgE,25564
alembic/ddl/mssql.py,sha256=0k26xnUSZNj3qCHEMzRFbaWgUzKcV07I3_-Ns47VhO0,14105
alembic/ddl/mysql.py,sha256=ff8OE0zQ8YYjAgltBbtjQkDR-g9z65DNeFjEMm4sX6c,16675
alembic/ddl/oracle.py,sha256=E0VaZaUM_5mwqNiJVA3zOAK-cuHVVIv_-NmUbH1JuGQ,6097
alembic/ddl/postgresql.py,sha256=aO8pcVN5ycw1wG2m1RRt8dQUD1KgRa6T4rSzg9FPCkU,26457
alembic/ddl/sqlite.py,sha256=9q7NAxyeFwn9kWwQSc9RLeMFSos8waM7x9lnXdByh44,7613
alembic/environment.py,sha256=MM5lPayGT04H3aeng1H7GQ8HEAs3VGX5yy6mDLCPLT4,43
alembic/migration.py,sha256=MV6Fju6rZtn2fTREKzXrCZM6aIBGII4OMZFix0X-GLs,41
alembic/op.py,sha256=flHtcsVqOD-ZgZKK2pv-CJ5Cwh-KJ7puMUNXzishxLw,167
alembic/op.pyi,sha256=ldQBwAfzm_-ZsC3nizMuGoD34hjMKb4V_-Q1rR8q8LI,48591
alembic/operations/__init__.py,sha256=e0KQSZAgLpTWvyvreB7DWg7RJV_MWSOPVDgCqsd2FzY,318
alembic/operations/__pycache__/__init__.cpython-312.pyc,,
alembic/operations/__pycache__/base.cpython-312.pyc,,
alembic/operations/__pycache__/batch.cpython-312.pyc,,
alembic/operations/__pycache__/ops.cpython-312.pyc,,
alembic/operations/__pycache__/schemaobj.cpython-312.pyc,,
alembic/operations/__pycache__/toimpl.cpython-312.pyc,,
alembic/operations/base.py,sha256=2so4KisDNuOLw0CRiZqorIHrhuenpVoFbn3B0sNvDic,72471
alembic/operations/batch.py,sha256=uMvGJDlcTs0GSHasg4Gsdv1YcXeLOK_1lkRl3jk1ezY,26954
alembic/operations/ops.py,sha256=aP9Uz36k98O_Y-njKIAifyvyhi0g2zU6_igKMos91_s,93539
alembic/operations/schemaobj.py,sha256=-tWad8pgWUNWucbpTnPuFK_EEl913C0RADJhlBnrjhc,9393
alembic/operations/toimpl.py,sha256=K8nUmojtL94tyLSWdDD-e94IbghZ19k55iBIMvzMm5E,6993
alembic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
alembic/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
alembic/runtime/__pycache__/__init__.cpython-312.pyc,,
alembic/runtime/__pycache__/environment.cpython-312.pyc,,
alembic/runtime/__pycache__/migration.cpython-312.pyc,,
alembic/runtime/environment.py,sha256=qaerrw5jB7zYliNnCvIziaju4-tvQ451MuGW8PHnfvw,41019
alembic/runtime/migration.py,sha256=5UtTI_T0JtYzt6ZpeUhannMZOvXWiEymKFOpeCefaPY,49407
alembic/script/__init__.py,sha256=lSj06O391Iy5avWAiq8SPs6N8RBgxkSPjP8wpXcNDGg,100
alembic/script/__pycache__/__init__.cpython-312.pyc,,
alembic/script/__pycache__/base.cpython-312.pyc,,
alembic/script/__pycache__/revision.cpython-312.pyc,,
alembic/script/__pycache__/write_hooks.cpython-312.pyc,,
alembic/script/base.py,sha256=90SpT8wyTMTUuS0Svsy5YIoqJSrR-6CtYSzStmRvFT0,37174
alembic/script/revision.py,sha256=DE0nwvDOzdFo843brvnhs1DfP0jRC5EVQHrNihC7PUQ,61471
alembic/script/write_hooks.py,sha256=Nqj4zz3sm97kAPOpK1m-i2znJchiybO_TWT50oljlJw,4917
alembic/templates/async/README,sha256=ISVtAOvqvKk_5ThM5ioJE-lMkvf9IbknFUFVU_vPma4,58
alembic/templates/async/__pycache__/env.cpython-312.pyc,,
alembic/templates/async/alembic.ini.mako,sha256=k3IyGDG15Rp1JDweC0TiDauaKYNvj3clrGfhw6oV6MI,3505
alembic/templates/async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389
alembic/templates/async/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
alembic/templates/generic/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38
alembic/templates/generic/__pycache__/env.cpython-312.pyc,,
alembic/templates/generic/alembic.ini.mako,sha256=gZWFmH2A9sP0i7cxEDhJFkjGtTKUXaVna8QAbIaRqxk,3614
alembic/templates/generic/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103
alembic/templates/generic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
alembic/templates/multidb/README,sha256=dWLDhnBgphA4Nzb7sNlMfCS3_06YqVbHhz-9O5JNqyI,606
alembic/templates/multidb/__pycache__/env.cpython-312.pyc,,
alembic/templates/multidb/alembic.ini.mako,sha256=j_Y0yuZVoHy7sTPgSPd8DmbT2ItvAdWs7trYZSOmFnw,3708
alembic/templates/multidb/env.py,sha256=6zNjnW8mXGUk7erTsAvrfhvqoczJ-gagjVq1Ypg2YIQ,4230
alembic/templates/multidb/script.py.mako,sha256=N06nMtNSwHkgl0EBXDyMt8njp9tlOesR583gfq21nbY,1090
alembic/testing/__init__.py,sha256=kOxOh5nwmui9d-_CCq9WA4Udwy7ITjm453w74CTLZDo,1159
alembic/testing/__pycache__/__init__.cpython-312.pyc,,
alembic/testing/__pycache__/assertions.cpython-312.pyc,,
alembic/testing/__pycache__/env.cpython-312.pyc,,
alembic/testing/__pycache__/fixtures.cpython-312.pyc,,
alembic/testing/__pycache__/requirements.cpython-312.pyc,,
alembic/testing/__pycache__/schemacompare.cpython-312.pyc,,
alembic/testing/__pycache__/util.cpython-312.pyc,,
alembic/testing/__pycache__/warnings.cpython-312.pyc,,
alembic/testing/assertions.py,sha256=1CbJk8c8-WO9eJ0XJ0jJvMsNRLUrXV41NOeIJUAlOBk,5015
alembic/testing/env.py,sha256=zJacVb_z6uLs2U1TtkmnFH9P3_F-3IfYbVv4UEPOvfo,10754
alembic/testing/fixtures.py,sha256=NyP4wE_dFN9ZzSGiBagRu1cdzkka03nwJYJYHYrrkSY,9112
alembic/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
alembic/testing/plugin/bootstrap.py,sha256=9C6wtjGrIVztZ928w27hsQE0KcjDLIUtUN3dvZKsMVk,50
alembic/testing/requirements.py,sha256=WByOiJxn2crazIXPq6-0cfqV95cfd9vP_ZQ1Cf2l8hY,4841
alembic/testing/schemacompare.py,sha256=7_4_0Y4UvuMiZ66pz1RC_P8Z1kYOP-R4Y5qUcNmcMKA,4535
alembic/testing/suite/__init__.py,sha256=MvE7-hwbaVN1q3NM-ztGxORU9dnIelUCINKqNxewn7Y,288
alembic/testing/suite/__pycache__/__init__.cpython-312.pyc,,
alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_op.cpython-312.pyc,,
alembic/testing/suite/_autogen_fixtures.py,sha256=cDq1pmzHe15S6dZPGNC6sqFaCQ3hLT_oPV2IDigUGQ0,9880
alembic/testing/suite/test_autogen_comments.py,sha256=aEGqKUDw4kHjnDk298aoGcQvXJWmZXcIX_2FxH4cJK8,6283
alembic/testing/suite/test_autogen_computed.py,sha256=qJeBpc8urnwTFvbwWrSTIbHVkRUuCXP-dKaNbUK2U2U,6077
alembic/testing/suite/test_autogen_diffs.py,sha256=T4SR1n_kmcOKYhR4W1-dA0e5sddJ69DSVL2HW96kAkE,8394
alembic/testing/suite/test_autogen_fks.py,sha256=AqFmb26Buex167HYa9dZWOk8x-JlB1OK3bwcvvjDFaU,32927
alembic/testing/suite/test_autogen_identity.py,sha256=kcuqngG7qXAKPJDX4U8sRzPKHEJECHuZ0DtuaS6tVkk,5824
alembic/testing/suite/test_environment.py,sha256=w9F0xnLEbALeR8k6_-Tz6JHvy91IqiTSypNasVzXfZQ,11877
alembic/testing/suite/test_op.py,sha256=2XQCdm_NmnPxHGuGj7hmxMzIhKxXNotUsKdACXzE1mM,1343
alembic/testing/util.py,sha256=CQrcQDA8fs_7ME85z5ydb-Bt70soIIID-qNY1vbR2dg,3350
alembic/testing/warnings.py,sha256=RxA7x_8GseANgw07Us8JN_1iGbANxaw6_VitX2ZGQH4,1078
alembic/util/__init__.py,sha256=cPF_jjFx7YRBByHHDqW3wxCIHsqnGfncEr_i238aduY,1202
alembic/util/__pycache__/__init__.cpython-312.pyc,,
alembic/util/__pycache__/compat.cpython-312.pyc,,
alembic/util/__pycache__/editor.cpython-312.pyc,,
alembic/util/__pycache__/exc.cpython-312.pyc,,
alembic/util/__pycache__/langhelpers.cpython-312.pyc,,
alembic/util/__pycache__/messaging.cpython-312.pyc,,
alembic/util/__pycache__/pyfiles.cpython-312.pyc,,
alembic/util/__pycache__/sqla_compat.cpython-312.pyc,,
alembic/util/compat.py,sha256=WN8jPPFB9ri_uuEM1HEaN1ak3RJc_H3x8NqvtFkoXuM,2279
alembic/util/editor.py,sha256=JIz6_BdgV8_oKtnheR6DZoB7qnrHrlRgWjx09AsTsUw,2546
alembic/util/exc.py,sha256=KQTru4zcgAmN4IxLMwLFS56XToUewaXB7oOLcPNjPwg,98
alembic/util/langhelpers.py,sha256=ZFGyGygHRbztOeajpajppyhd-Gp4PB5slMuvCFVrnmg,8591
alembic/util/messaging.py,sha256=B6T-loMhIOY3OTbG47Ywp1Df9LZn18PgjwpwLrD1VNg,3042
alembic/util/pyfiles.py,sha256=95J01FChN0j2uP3p72mjaOQvh5wC6XbdGtTDK8oEzsQ,3373
alembic/util/sqla_compat.py,sha256=94MHlkj43y-QQySz5dCUiJUNOPr3BF9TQ_BrP6ey-8w,18906

View File

@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.9.0)
+Generator: bdist_wheel (0.41.2)
 Root-Is-Purelib: true
 Tag: py3-none-any

View File

@@ -1,163 +0,0 @@
../../../bin/alembic,sha256=_J6yD4KtWGrilKk3GrsJKTd-33Dqp4ejOp_LNh0fQNs,234
alembic-1.16.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
alembic-1.16.5.dist-info/METADATA,sha256=_hKTp0jnKI77a2esxmoCXgv5t2U8hDZS7yZDRkDBl0k,7265
alembic-1.16.5.dist-info/RECORD,,
alembic-1.16.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
alembic-1.16.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
alembic-1.16.5.dist-info/entry_points.txt,sha256=aykM30soxwGN0pB7etLc1q0cHJbL9dy46RnK9VX4LLw,48
alembic-1.16.5.dist-info/licenses/LICENSE,sha256=NeqcNBmyYfrxvkSMT0fZJVKBv2s2tf_qVQUiJ9S6VN4,1059
alembic-1.16.5.dist-info/top_level.txt,sha256=FwKWd5VsPFC8iQjpu1u9Cn-JnK3-V1RhUCmWqz1cl-s,8
alembic/__init__.py,sha256=H_hItDeyDOrQAHc1AFoYXIRN3O3FSxw4zSNiVzz2JlM,63
alembic/__main__.py,sha256=373m7-TBh72JqrSMYviGrxCHZo-cnweM8AGF8A22PmY,78
alembic/__pycache__/__init__.cpython-312.pyc,,
alembic/__pycache__/__main__.cpython-312.pyc,,
alembic/__pycache__/command.cpython-312.pyc,,
alembic/__pycache__/config.cpython-312.pyc,,
alembic/__pycache__/context.cpython-312.pyc,,
alembic/__pycache__/environment.cpython-312.pyc,,
alembic/__pycache__/migration.cpython-312.pyc,,
alembic/__pycache__/op.cpython-312.pyc,,
alembic/autogenerate/__init__.py,sha256=ntmUTXhjLm4_zmqIwyVaECdpPDn6_u1yM9vYk6-553E,543
alembic/autogenerate/__pycache__/__init__.cpython-312.pyc,,
alembic/autogenerate/__pycache__/api.cpython-312.pyc,,
alembic/autogenerate/__pycache__/compare.cpython-312.pyc,,
alembic/autogenerate/__pycache__/render.cpython-312.pyc,,
alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc,,
alembic/autogenerate/api.py,sha256=L4qkapSJO1Ypymx8HsjLl0vFFt202agwMYsQbIe6ZtI,22219
alembic/autogenerate/compare.py,sha256=LRTxNijEBvcTauuUXuJjC6Sg_gUn33FCYBTF0neZFwE,45979
alembic/autogenerate/render.py,sha256=ceQL8nk8m2kBtQq5gtxtDLR9iR0Sck8xG_61Oez-Sqs,37270
alembic/autogenerate/rewriter.py,sha256=NIASSS-KaNKPmbm1k4pE45aawwjSh1Acf6eZrOwnUGM,7814
alembic/command.py,sha256=pZPQUGSxCjFu7qy0HMe02HJmByM0LOqoiK2AXKfRO3A,24855
alembic/config.py,sha256=nfwN_OOFPpee-OY4o10DANh7VG_E4O7bdW00Wx8NNKY,34237
alembic/context.py,sha256=hK1AJOQXJ29Bhn276GYcosxeG7pC5aZRT5E8c4bMJ4Q,195
alembic/context.pyi,sha256=fdeFNTRc0bUgi7n2eZWVFh6NG-TzIv_0gAcapbfHnKY,31773
alembic/ddl/__init__.py,sha256=Df8fy4Vn_abP8B7q3x8gyFwEwnLw6hs2Ljt_bV3EZWE,152
alembic/ddl/__pycache__/__init__.cpython-312.pyc,,
alembic/ddl/__pycache__/_autogen.cpython-312.pyc,,
alembic/ddl/__pycache__/base.cpython-312.pyc,,
alembic/ddl/__pycache__/impl.cpython-312.pyc,,
alembic/ddl/__pycache__/mssql.cpython-312.pyc,,
alembic/ddl/__pycache__/mysql.cpython-312.pyc,,
alembic/ddl/__pycache__/oracle.cpython-312.pyc,,
alembic/ddl/__pycache__/postgresql.cpython-312.pyc,,
alembic/ddl/__pycache__/sqlite.cpython-312.pyc,,
alembic/ddl/_autogen.py,sha256=Blv2RrHNyF4cE6znCQXNXG5T9aO-YmiwD4Fz-qfoaWA,9275
alembic/ddl/base.py,sha256=A1f89-rCZvqw-hgWmBbIszRqx94lL6gKLFXE9kHettA,10478
alembic/ddl/impl.py,sha256=UL8-iza7CJk_T73lr5fjDLdhxEL56uD-AEjtmESAbLk,30439
alembic/ddl/mssql.py,sha256=NzORSIDHUll_g6iH4IyMTXZU1qjKzXrpespKrjWnfLY,14216
alembic/ddl/mysql.py,sha256=LSfwiABdT54sKY_uQ-h6RvjbGiG-1vCSDkO3ECeq3qM,18383
alembic/ddl/oracle.py,sha256=669YlkcZihlXFbnXhH2krdrvDry8q5pcUGfoqkg_R6Y,6243
alembic/ddl/postgresql.py,sha256=S7uye2NDSHLwV3w8SJ2Q9DLbcvQIxQfJ3EEK6JqyNag,29950
alembic/ddl/sqlite.py,sha256=u5tJgRUiY6bzVltl_NWlI6cy23v8XNagk_9gPI6Lnns,8006
alembic/environment.py,sha256=MM5lPayGT04H3aeng1H7GQ8HEAs3VGX5yy6mDLCPLT4,43
alembic/migration.py,sha256=MV6Fju6rZtn2fTREKzXrCZM6aIBGII4OMZFix0X-GLs,41
alembic/op.py,sha256=flHtcsVqOD-ZgZKK2pv-CJ5Cwh-KJ7puMUNXzishxLw,167
alembic/op.pyi,sha256=PQ4mKNp7EXrjVdIWQRoGiBSVke4PPxTc9I6qF8ZGGZE,50711
alembic/operations/__init__.py,sha256=e0KQSZAgLpTWvyvreB7DWg7RJV_MWSOPVDgCqsd2FzY,318
alembic/operations/__pycache__/__init__.cpython-312.pyc,,
alembic/operations/__pycache__/base.cpython-312.pyc,,
alembic/operations/__pycache__/batch.cpython-312.pyc,,
alembic/operations/__pycache__/ops.cpython-312.pyc,,
alembic/operations/__pycache__/schemaobj.cpython-312.pyc,,
alembic/operations/__pycache__/toimpl.cpython-312.pyc,,
alembic/operations/base.py,sha256=npw1iFboTlEsaQS0b7mb2SEHsRDV4GLQqnjhcfma6Nk,75157
alembic/operations/batch.py,sha256=1UmCFcsFWObinQWFRWoGZkjynl54HKpldbPs67aR4wg,26923
alembic/operations/ops.py,sha256=ftsFgcZIctxRDiuGgkQsaFHsMlRP7cLq7Dj_seKVBnQ,96276
alembic/operations/schemaobj.py,sha256=Wp-bBe4a8lXPTvIHJttBY0ejtpVR5Jvtb2kI-U2PztQ,9468
alembic/operations/toimpl.py,sha256=rgufuSUNwpgrOYzzY3Q3ELW1rQv2fQbQVokXgnIYIrs,7503
alembic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
alembic/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
alembic/runtime/__pycache__/__init__.cpython-312.pyc,,
alembic/runtime/__pycache__/environment.cpython-312.pyc,,
alembic/runtime/__pycache__/migration.cpython-312.pyc,,
alembic/runtime/environment.py,sha256=L6bDW1dvw8L4zwxlTG8KnT0xcCgLXxUfdRpzqlJoFjo,41479
alembic/runtime/migration.py,sha256=lu9_z_qyWmNzSM52_FgdXP_G52PTmTTeOeMBQAkQTFg,49997
alembic/script/__init__.py,sha256=lSj06O391Iy5avWAiq8SPs6N8RBgxkSPjP8wpXcNDGg,100
alembic/script/__pycache__/__init__.cpython-312.pyc,,
alembic/script/__pycache__/base.cpython-312.pyc,,
alembic/script/__pycache__/revision.cpython-312.pyc,,
alembic/script/__pycache__/write_hooks.cpython-312.pyc,,
alembic/script/base.py,sha256=4jINClsNNwQIvnf4Kwp9JPAMrANLXdLItylXmcMqAkI,36896
alembic/script/revision.py,sha256=BQcJoMCIXtSJRLCvdasgLOtCx9O7A8wsSym1FsqLW4s,62307
alembic/script/write_hooks.py,sha256=uQWAtguSCrxU_k9d87NX19y6EzyjJRRQ5HS9cyPnK9o,5092
alembic/templates/async/README,sha256=ISVtAOvqvKk_5ThM5ioJE-lMkvf9IbknFUFVU_vPma4,58
alembic/templates/async/__pycache__/env.cpython-312.pyc,,
alembic/templates/async/alembic.ini.mako,sha256=Bgi4WkaHYsT7xvsX-4WOGkcXKFroNoQLaUvZA23ZwGs,4864
alembic/templates/async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389
alembic/templates/async/script.py.mako,sha256=04kgeBtNMa4cCnG8CfQcKt6P6rnloIfj8wy0u_DBydM,704
alembic/templates/generic/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38
alembic/templates/generic/__pycache__/env.cpython-312.pyc,,
alembic/templates/generic/alembic.ini.mako,sha256=LCpLL02bi9Qr3KRTEj9NbQqAu0ckUmYBwPtrMtQkv-Y,4864
alembic/templates/generic/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103
alembic/templates/generic/script.py.mako,sha256=04kgeBtNMa4cCnG8CfQcKt6P6rnloIfj8wy0u_DBydM,704
alembic/templates/multidb/README,sha256=dWLDhnBgphA4Nzb7sNlMfCS3_06YqVbHhz-9O5JNqyI,606
alembic/templates/multidb/__pycache__/env.cpython-312.pyc,,
alembic/templates/multidb/alembic.ini.mako,sha256=rIp1LTdE1xcoFT2G7X72KshzYjUTRrHTvnkvFL___-8,5190
alembic/templates/multidb/env.py,sha256=6zNjnW8mXGUk7erTsAvrfhvqoczJ-gagjVq1Ypg2YIQ,4230
alembic/templates/multidb/script.py.mako,sha256=ZbCXMkI5Wj2dwNKcxuVGkKZ7Iav93BNx_bM4zbGi3c8,1235
alembic/templates/pyproject/README,sha256=dMhIiFoeM7EdeaOXBs3mVQ6zXACMyGXDb_UBB6sGRA0,60
alembic/templates/pyproject/__pycache__/env.cpython-312.pyc,,
alembic/templates/pyproject/alembic.ini.mako,sha256=bQnEoydnLOUgg9vNbTOys4r5MaW8lmwYFXSrlfdEEkw,782
alembic/templates/pyproject/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103
alembic/templates/pyproject/pyproject.toml.mako,sha256=Gf16ZR9OMG9zDlFO5PVQlfiL1DTKwSA--sTNzK7Lba0,2852
alembic/templates/pyproject/script.py.mako,sha256=04kgeBtNMa4cCnG8CfQcKt6P6rnloIfj8wy0u_DBydM,704
alembic/templates/pyproject_async/README,sha256=2Q5XcEouiqQ-TJssO9805LROkVUd0F6d74rTnuLrifA,45
alembic/templates/pyproject_async/__pycache__/env.cpython-312.pyc,,
alembic/templates/pyproject_async/alembic.ini.mako,sha256=bQnEoydnLOUgg9vNbTOys4r5MaW8lmwYFXSrlfdEEkw,782
alembic/templates/pyproject_async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389
alembic/templates/pyproject_async/pyproject.toml.mako,sha256=Gf16ZR9OMG9zDlFO5PVQlfiL1DTKwSA--sTNzK7Lba0,2852
alembic/templates/pyproject_async/script.py.mako,sha256=04kgeBtNMa4cCnG8CfQcKt6P6rnloIfj8wy0u_DBydM,704
alembic/testing/__init__.py,sha256=PTMhi_2PZ1T_3atQS2CIr0V4YRZzx_doKI-DxKdQS44,1297
alembic/testing/__pycache__/__init__.cpython-312.pyc,,
alembic/testing/__pycache__/assertions.cpython-312.pyc,,
alembic/testing/__pycache__/env.cpython-312.pyc,,
alembic/testing/__pycache__/fixtures.cpython-312.pyc,,
alembic/testing/__pycache__/requirements.cpython-312.pyc,,
alembic/testing/__pycache__/schemacompare.cpython-312.pyc,,
alembic/testing/__pycache__/util.cpython-312.pyc,,
alembic/testing/__pycache__/warnings.cpython-312.pyc,,
alembic/testing/assertions.py,sha256=qcqf3tRAUe-A12NzuK_yxlksuX9OZKRC5E8pKIdBnPg,5302
alembic/testing/env.py,sha256=pka7fjwOC8hYL6X0XE4oPkJpy_1WX01bL7iP7gpO_4I,11551
alembic/testing/fixtures.py,sha256=fOzsRF8SW6CWpAH0sZpUHcgsJjun9EHnp4k2S3Lq5eU,9920
alembic/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
alembic/testing/plugin/bootstrap.py,sha256=9C6wtjGrIVztZ928w27hsQE0KcjDLIUtUN3dvZKsMVk,50
alembic/testing/requirements.py,sha256=gNnnvgPCuiqKeHmiNymdQuYIjQ0BrxiPxu_in4eHEsc,4180
alembic/testing/schemacompare.py,sha256=N5UqSNCOJetIKC4vKhpYzQEpj08XkdgIoqBmEPQ3tlc,4838
alembic/testing/suite/__init__.py,sha256=MvE7-hwbaVN1q3NM-ztGxORU9dnIelUCINKqNxewn7Y,288
alembic/testing/suite/__pycache__/__init__.cpython-312.pyc,,
alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc,,
alembic/testing/suite/__pycache__/test_op.cpython-312.pyc,,
alembic/testing/suite/_autogen_fixtures.py,sha256=Drrz_FKb9KDjq8hkwxtPkJVY1sCY7Biw-Muzb8kANp8,13480
alembic/testing/suite/test_autogen_comments.py,sha256=aEGqKUDw4kHjnDk298aoGcQvXJWmZXcIX_2FxH4cJK8,6283
alembic/testing/suite/test_autogen_computed.py,sha256=-5wran56qXo3afAbSk8cuSDDpbQweyJ61RF-GaVuZbA,4126
alembic/testing/suite/test_autogen_diffs.py,sha256=T4SR1n_kmcOKYhR4W1-dA0e5sddJ69DSVL2HW96kAkE,8394
alembic/testing/suite/test_autogen_fks.py,sha256=AqFmb26Buex167HYa9dZWOk8x-JlB1OK3bwcvvjDFaU,32927
alembic/testing/suite/test_autogen_identity.py,sha256=kcuqngG7qXAKPJDX4U8sRzPKHEJECHuZ0DtuaS6tVkk,5824
alembic/testing/suite/test_environment.py,sha256=OwD-kpESdLoc4byBrGrXbZHvqtPbzhFCG4W9hJOJXPQ,11877
alembic/testing/suite/test_op.py,sha256=2XQCdm_NmnPxHGuGj7hmxMzIhKxXNotUsKdACXzE1mM,1343
alembic/testing/util.py,sha256=CQrcQDA8fs_7ME85z5ydb-Bt70soIIID-qNY1vbR2dg,3350
alembic/testing/warnings.py,sha256=cDDWzvxNZE6x9dME2ACTXSv01G81JcIbE1GIE_s1kvg,831
alembic/util/__init__.py,sha256=_Zj_xp6ssKLyoLHUFzmKhnc8mhwXW8D8h7qyX-wO56M,1519
alembic/util/__pycache__/__init__.cpython-312.pyc,,
alembic/util/__pycache__/compat.cpython-312.pyc,,
alembic/util/__pycache__/editor.cpython-312.pyc,,
alembic/util/__pycache__/exc.cpython-312.pyc,,
alembic/util/__pycache__/langhelpers.cpython-312.pyc,,
alembic/util/__pycache__/messaging.cpython-312.pyc,,
alembic/util/__pycache__/pyfiles.cpython-312.pyc,,
alembic/util/__pycache__/sqla_compat.cpython-312.pyc,,
alembic/util/compat.py,sha256=Vt5xCn5Y675jI4seKNBV4IVnCl9V4wyH3OBI2w7U0EY,4248
alembic/util/editor.py,sha256=JIz6_BdgV8_oKtnheR6DZoB7qnrHrlRgWjx09AsTsUw,2546
alembic/util/exc.py,sha256=ZBlTQ8g-Jkb1iYFhFHs9djilRz0SSQ0Foc5SSoENs5o,564
alembic/util/langhelpers.py,sha256=LpOcovnhMnP45kTt8zNJ4BHpyQrlF40OL6yDXjqKtsE,10026
alembic/util/messaging.py,sha256=3bEBoDy4EAXETXAvArlYjeMITXDTgPTu6ZoE3ytnzSw,3294
alembic/util/pyfiles.py,sha256=kOBjZEytRkBKsQl0LAj2sbKJMQazjwQ_5UeMKSIvVFo,4730
alembic/util/sqla_compat.py,sha256=9OYPTf-GCultAIuv1PoiaqYXAApZQxUOqjrOaeJDAik,14790

View File

@@ -1,4 +1,6 @@
+import sys
+
 from . import context
 from . import op
 
-__version__ = "1.16.5"
+__version__ = "1.12.1"

View File

@@ -1,10 +1,10 @@
-from .api import _render_migration_diffs as _render_migration_diffs
-from .api import compare_metadata as compare_metadata
-from .api import produce_migrations as produce_migrations
-from .api import render_python_code as render_python_code
-from .api import RevisionContext as RevisionContext
-from .compare import _produce_net_changes as _produce_net_changes
-from .compare import comparators as comparators
-from .render import render_op_text as render_op_text
-from .render import renderers as renderers
-from .rewriter import Rewriter as Rewriter
+from .api import _render_migration_diffs
+from .api import compare_metadata
+from .api import produce_migrations
+from .api import render_python_code
+from .api import RevisionContext
+from .compare import _produce_net_changes
+from .compare import comparators
+from .render import render_op_text
+from .render import renderers
+from .rewriter import Rewriter
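
Editor's note: the only change in this hunk is re-export style; the redundant `import X as X` alias on the removed side is the form strict type checkers (mypy's `--no-implicit-reexport`) treat as an explicit re-export. Either way, the package-level names are what callers use. A hedged sketch of the documented `compare_metadata` entry point (connection URL and metadata are placeholders):

```python
from sqlalchemy import MetaData, create_engine

from alembic.autogenerate import compare_metadata
from alembic.migration import MigrationContext

engine = create_engine("sqlite:///example.db")  # placeholder database
target_metadata = MetaData()                    # your models' metadata

with engine.connect() as conn:
    migration_context = MigrationContext.configure(conn)
    # Yields diff tuples such as ('add_table', Table(...)) or
    # ('add_index', Index(...)), comparing models against the live schema.
    for diff in compare_metadata(migration_context, target_metadata):
        print(diff)
```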

View File

@@ -17,7 +17,6 @@ from . import compare
 from . import render
 from .. import util
 from ..operations import ops
-from ..util import sqla_compat
 
 """Provide the 'autogenerate' feature which can produce migration operations
 automatically."""
@@ -28,7 +27,6 @@ if TYPE_CHECKING:
     from sqlalchemy.engine import Inspector
     from sqlalchemy.sql.schema import MetaData
     from sqlalchemy.sql.schema import SchemaItem
-    from sqlalchemy.sql.schema import Table
 
     from ..config import Config
     from ..operations.ops import DowngradeOps
@@ -166,7 +164,6 @@ def compare_metadata(context: MigrationContext, metadata: MetaData) -> Any:
     """
 
     migration_script = produce_migrations(context, metadata)
-    assert migration_script.upgrade_ops is not None
     return migration_script.upgrade_ops.as_diffs()
@@ -277,7 +274,7 @@
     """Maintains configuration and state that's specific to an
     autogenerate operation."""
 
-    metadata: Union[MetaData, Sequence[MetaData], None] = None
+    metadata: Optional[MetaData] = None
     """The :class:`~sqlalchemy.schema.MetaData` object
     representing the destination.
@@ -332,8 +329,8 @@
     def __init__(
         self,
         migration_context: MigrationContext,
-        metadata: Union[MetaData, Sequence[MetaData], None] = None,
-        opts: Optional[Dict[str, Any]] = None,
+        metadata: Optional[MetaData] = None,
+        opts: Optional[dict] = None,
         autogenerate: bool = True,
     ) -> None:
         if (
@@ -443,7 +440,7 @@
     def run_object_filters(
         self,
         object_: SchemaItem,
-        name: sqla_compat._ConstraintName,
+        name: Optional[str],
         type_: NameFilterType,
         reflected: bool,
         compare_to: Optional[SchemaItem],
@@ -467,7 +464,7 @@
     run_filters = run_object_filters
 
     @util.memoized_property
-    def sorted_tables(self) -> List[Table]:
+    def sorted_tables(self):
         """Return an aggregate of the :attr:`.MetaData.sorted_tables`
         collection(s).
@@ -483,7 +480,7 @@
         return result
 
     @util.memoized_property
-    def table_key_to_table(self) -> Dict[str, Table]:
+    def table_key_to_table(self):
         """Return an aggregate of the :attr:`.MetaData.tables` dictionaries.
 
         The :attr:`.MetaData.tables` collection is a dictionary of table key
@@ -494,7 +491,7 @@
         objects contain the same table key, an exception is raised.
         """
-        result: Dict[str, Table] = {}
+        result = {}
         for m in util.to_list(self.metadata):
             intersect = set(result).intersection(set(m.tables))
             if intersect:
@@ -596,9 +593,9 @@
         migration_script = self.generated_revisions[-1]
         if not getattr(migration_script, "_needs_render", False):
             migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token
-            migration_script.downgrade_ops_list[-1].downgrade_token = (
-                downgrade_token
-            )
+            migration_script.downgrade_ops_list[
+                -1
+            ].downgrade_token = downgrade_token
             migration_script._needs_render = True
         else:
             migration_script._upgrade_ops.append(
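
Editor's note: `run_object_filters`, whose signature changes above, is the hook that applies user-supplied `include_object` callables from `env.py`; the callable's five-argument shape is the same in both versions. A typical hedged sketch (`skip_autogen` is an illustrative info key, not an Alembic convention):

```python
def include_object(object_, name, type_, reflected, compare_to):
    # Called for each table/column/index/constraint during autogenerate;
    # return False to exclude the object from the comparison.
    if type_ == "table" and object_.info.get("skip_autogen", False):
        return False
    return True


# In env.py:
# context.configure(
#     connection=connection,
#     target_metadata=target_metadata,
#     include_object=include_object,
# )
```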

View File

@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
import contextlib
@@ -10,12 +7,12 @@ from typing import Any
from typing import cast
from typing import Dict
from typing import Iterator
from typing import List
from typing import Mapping
from typing import Optional
from typing import Set
from typing import Tuple
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union
from sqlalchemy import event
@@ -24,15 +21,10 @@ from sqlalchemy import schema as sa_schema
from sqlalchemy import text
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import expression
from sqlalchemy.sql.elements import conv
from sqlalchemy.sql.schema import ForeignKeyConstraint
from sqlalchemy.sql.schema import Index
from sqlalchemy.sql.schema import UniqueConstraint
from sqlalchemy.util import OrderedSet
from alembic.ddl.base import _fk_spec
from .. import util
from ..ddl._autogen import is_index_sig
from ..ddl._autogen import is_uq_sig
from ..operations import ops
from ..util import sqla_compat
@@ -43,7 +35,10 @@ if TYPE_CHECKING:
from sqlalchemy.sql.elements import quoted_name
from sqlalchemy.sql.elements import TextClause
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.schema import ForeignKeyConstraint
from sqlalchemy.sql.schema import Index
from sqlalchemy.sql.schema import Table
from sqlalchemy.sql.schema import UniqueConstraint
from alembic.autogenerate.api import AutogenContext
from alembic.ddl.impl import DefaultImpl
@@ -51,8 +46,6 @@ if TYPE_CHECKING:
from alembic.operations.ops import MigrationScript
from alembic.operations.ops import ModifyTableOps
from alembic.operations.ops import UpgradeOps
from ..ddl._autogen import _constraint_sig
log = logging.getLogger(__name__)
@@ -217,7 +210,7 @@ def _compare_tables(
(inspector),
# fmt: on
)
_InspectorConv(inspector).reflect_table(t, include_columns=None)
sqla_compat._reflect_table(inspector, t)
if autogen_context.run_object_filters(t, tname, "table", True, None):
modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
@@ -247,8 +240,7 @@ def _compare_tables(
_compat_autogen_column_reflect(inspector),
# fmt: on
)
_InspectorConv(inspector).reflect_table(t, include_columns=None)
sqla_compat._reflect_table(inspector, t)
conn_column_info[(s, tname)] = t
for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])):
@@ -437,56 +429,102 @@ def _compare_columns(
log.info("Detected removed column '%s.%s'", name, cname)
_C = TypeVar("_C", bound=Union[UniqueConstraint, ForeignKeyConstraint, Index])
class _constraint_sig:
const: Union[UniqueConstraint, ForeignKeyConstraint, Index]
class _InspectorConv:
__slots__ = ("inspector",)
def __init__(self, inspector):
self.inspector = inspector
def _apply_reflectinfo_conv(self, consts):
if not consts:
return consts
for const in consts:
if const["name"] is not None and not isinstance(
const["name"], conv
):
const["name"] = conv(const["name"])
return consts
def _apply_constraint_conv(self, consts):
if not consts:
return consts
for const in consts:
if const.name is not None and not isinstance(const.name, conv):
const.name = conv(const.name)
return consts
def get_indexes(self, *args, **kw):
return self._apply_reflectinfo_conv(
self.inspector.get_indexes(*args, **kw)
def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]:
return sqla_compat._get_constraint_final_name(
self.const, context.dialect
)
def get_unique_constraints(self, *args, **kw):
return self._apply_reflectinfo_conv(
self.inspector.get_unique_constraints(*args, **kw)
def __eq__(self, other):
return self.const == other.const
def __ne__(self, other):
return self.const != other.const
def __hash__(self) -> int:
return hash(self.const)
class _uq_constraint_sig(_constraint_sig):
is_index = False
is_unique = True
def __init__(self, const: UniqueConstraint, impl: DefaultImpl) -> None:
self.const = const
self.name = const.name
self.sig = ("UNIQUE_CONSTRAINT",) + impl.create_unique_constraint_sig(
const
)
def get_foreign_keys(self, *args, **kw):
return self._apply_reflectinfo_conv(
self.inspector.get_foreign_keys(*args, **kw)
@property
def column_names(self) -> List[str]:
return [col.name for col in self.const.columns]
class _ix_constraint_sig(_constraint_sig):
is_index = True
def __init__(self, const: Index, impl: DefaultImpl) -> None:
self.const = const
self.name = const.name
self.sig = ("INDEX",) + impl.create_index_sig(const)
self.is_unique = bool(const.unique)
def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]:
return sqla_compat._get_constraint_final_name(
self.const, context.dialect
)
def reflect_table(self, table, *, include_columns):
self.inspector.reflect_table(table, include_columns=include_columns)
@property
def column_names(self) -> Union[List[quoted_name], List[None]]:
return sqla_compat._get_index_column_names(self.const)
# I had a cool version of this using _ReflectInfo, however that doesn't
# work in 1.4 and it's not public API in 2.x. Then this is just a two
# liner. So there's no competition...
self._apply_constraint_conv(table.constraints)
self._apply_constraint_conv(table.indexes)
class _fk_constraint_sig(_constraint_sig):
def __init__(
self, const: ForeignKeyConstraint, include_options: bool = False
) -> None:
self.const = const
self.name = const.name
(
self.source_schema,
self.source_table,
self.source_columns,
self.target_schema,
self.target_table,
self.target_columns,
onupdate,
ondelete,
deferrable,
initially,
) = _fk_spec(const)
self.sig: Tuple[Any, ...] = (
self.source_schema,
self.source_table,
tuple(self.source_columns),
self.target_schema,
self.target_table,
tuple(self.target_columns),
)
if include_options:
self.sig += (
(None if onupdate.lower() == "no action" else onupdate.lower())
if onupdate
else None,
(None if ondelete.lower() == "no action" else ondelete.lower())
if ondelete
else None,
# convert initially + deferrable into one three-state value
"initially_deferrable"
if initially and initially.lower() == "deferred"
else "deferrable"
if deferrable
else "not deferrable",
)
@comparators.dispatch_for("table")
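
Editor's note: the `_InspectorConv` wrapper in the hunk above normalizes reflected constraint names with SQLAlchemy's `conv()` marker so they compare equal to metadata constraints whose names came from a `naming_convention`. `conv` is just a string subclass flagging a name as already convention-compliant, which is why the code tests it with `isinstance`:

```python
from sqlalchemy.sql.elements import conv

name = conv("uq_users_email")
print(str(name))               # uq_users_email
print(isinstance(name, conv))  # True: exempt from naming_convention rewriting
```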
@@ -523,34 +561,34 @@ def _compare_indexes_and_uniques(
if conn_table is not None:
# 1b. ... and from connection, if the table exists
try:
conn_uniques = _InspectorConv(inspector).get_unique_constraints(
tname, schema=schema
)
supports_unique_constraints = True
except NotImplementedError:
pass
except TypeError:
# number of arguments is off for the base
# method in SQLAlchemy due to the cache decorator
# not being present
pass
else:
conn_uniques = [ # type:ignore[assignment]
uq
for uq in conn_uniques
if autogen_context.run_name_filters(
uq["name"],
"unique_constraint",
{"table_name": tname, "schema_name": schema},
if hasattr(inspector, "get_unique_constraints"):
try:
conn_uniques = inspector.get_unique_constraints( # type:ignore[assignment] # noqa
tname, schema=schema
)
]
for uq in conn_uniques:
if uq.get("duplicates_index"):
unique_constraints_duplicate_unique_indexes = True
supports_unique_constraints = True
except NotImplementedError:
pass
except TypeError:
# number of arguments is off for the base
# method in SQLAlchemy due to the cache decorator
# not being present
pass
else:
conn_uniques = [ # type:ignore[assignment]
uq
for uq in conn_uniques
if autogen_context.run_name_filters(
uq["name"],
"unique_constraint",
{"table_name": tname, "schema_name": schema},
)
]
for uq in conn_uniques:
if uq.get("duplicates_index"):
unique_constraints_duplicate_unique_indexes = True
try:
conn_indexes = _InspectorConv(inspector).get_indexes(
conn_indexes = inspector.get_indexes( # type:ignore[assignment]
tname, schema=schema
)
except NotImplementedError:
@@ -601,7 +639,7 @@ def _compare_indexes_and_uniques(
# 3. give the dialect a chance to omit indexes and constraints that
# we know are either added implicitly by the DB or that the DB
# can't accurately report on
impl.correct_for_autogen_constraints(
autogen_context.migration_context.impl.correct_for_autogen_constraints(
conn_uniques, # type: ignore[arg-type]
conn_indexes, # type: ignore[arg-type]
metadata_unique_constraints,
@@ -613,31 +651,31 @@ def _compare_indexes_and_uniques(
# Index and UniqueConstraint so we can easily work with them
# interchangeably
metadata_unique_constraints_sig = {
impl._create_metadata_constraint_sig(uq)
for uq in metadata_unique_constraints
_uq_constraint_sig(uq, impl) for uq in metadata_unique_constraints
}
metadata_indexes_sig = {
impl._create_metadata_constraint_sig(ix) for ix in metadata_indexes
_ix_constraint_sig(ix, impl) for ix in metadata_indexes
}
conn_unique_constraints = {
impl._create_reflected_constraint_sig(uq) for uq in conn_uniques
_uq_constraint_sig(uq, impl) for uq in conn_uniques
}
conn_indexes_sig = {
impl._create_reflected_constraint_sig(ix) for ix in conn_indexes
}
conn_indexes_sig = {_ix_constraint_sig(ix, impl) for ix in conn_indexes}
# 5. index things by name, for those objects that have names
metadata_names = {
cast(str, c.md_name_to_sql_name(autogen_context)): c
for c in metadata_unique_constraints_sig.union(metadata_indexes_sig)
if c.is_named
for c in metadata_unique_constraints_sig.union(
metadata_indexes_sig # type:ignore[arg-type]
)
if isinstance(c, _ix_constraint_sig)
or sqla_compat._constraint_is_named(c.const, autogen_context.dialect)
}
conn_uniques_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig]
conn_indexes_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig]
conn_uniques_by_name: Dict[sqla_compat._ConstraintName, _uq_constraint_sig]
conn_indexes_by_name: Dict[sqla_compat._ConstraintName, _ix_constraint_sig]
conn_uniques_by_name = {c.name: c for c in conn_unique_constraints}
conn_indexes_by_name = {c.name: c for c in conn_indexes_sig}
@@ -656,12 +694,13 @@ def _compare_indexes_and_uniques(
# 6. index things by "column signature", to help with unnamed unique
# constraints.
conn_uniques_by_sig = {uq.unnamed: uq for uq in conn_unique_constraints}
conn_uniques_by_sig = {uq.sig: uq for uq in conn_unique_constraints}
metadata_uniques_by_sig = {
uq.unnamed: uq for uq in metadata_unique_constraints_sig
uq.sig: uq for uq in metadata_unique_constraints_sig
}
metadata_indexes_by_sig = {ix.sig: ix for ix in metadata_indexes_sig}
unnamed_metadata_uniques = {
uq.unnamed: uq
uq.sig: uq
for uq in metadata_unique_constraints_sig
if not sqla_compat._constraint_is_named(
uq.const, autogen_context.dialect
@@ -676,18 +715,18 @@ def _compare_indexes_and_uniques(
# 4. The backend may double up indexes as unique constraints and
# vice versa (e.g. MySQL, Postgresql)
def obj_added(obj: _constraint_sig):
if is_index_sig(obj):
def obj_added(obj):
if obj.is_index:
if autogen_context.run_object_filters(
obj.const, obj.name, "index", False, None
):
modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const))
log.info(
"Detected added index %r on '%s'",
"Detected added index '%s' on %s",
obj.name,
obj.column_names,
", ".join(["'%s'" % obj.column_names]),
)
elif is_uq_sig(obj):
else:
if not supports_unique_constraints:
# can't report unique indexes as added if we don't
# detect them
@@ -702,15 +741,13 @@ def _compare_indexes_and_uniques(
ops.AddConstraintOp.from_constraint(obj.const)
)
log.info(
"Detected added unique constraint %r on '%s'",
"Detected added unique constraint '%s' on %s",
obj.name,
obj.column_names,
", ".join(["'%s'" % obj.column_names]),
)
else:
assert False
def obj_removed(obj: _constraint_sig):
if is_index_sig(obj):
def obj_removed(obj):
if obj.is_index:
if obj.is_unique and not supports_unique_constraints:
# many databases double up unique constraints
# as unique indexes. without that list we can't
@@ -721,8 +758,10 @@ def _compare_indexes_and_uniques(
obj.const, obj.name, "index", True, None
):
modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const))
log.info("Detected removed index %r on %r", obj.name, tname)
elif is_uq_sig(obj):
log.info(
"Detected removed index '%s' on '%s'", obj.name, tname
)
else:
if is_create_table or is_drop_table:
# if the whole table is being dropped, we don't need to
# consider unique constraint separately
@@ -734,40 +773,33 @@ def _compare_indexes_and_uniques(
ops.DropConstraintOp.from_constraint(obj.const)
)
log.info(
"Detected removed unique constraint %r on %r",
"Detected removed unique constraint '%s' on '%s'",
obj.name,
tname,
)
else:
assert False
def obj_changed(
old: _constraint_sig,
new: _constraint_sig,
msg: str,
):
if is_index_sig(old):
assert is_index_sig(new)
def obj_changed(old, new, msg):
if old.is_index:
if autogen_context.run_object_filters(
new.const, new.name, "index", False, old.const
):
log.info(
"Detected changed index %r on %r: %s", old.name, tname, msg
"Detected changed index '%s' on '%s':%s",
old.name,
tname,
", ".join(msg),
)
modify_ops.ops.append(ops.DropIndexOp.from_index(old.const))
modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const))
elif is_uq_sig(old):
assert is_uq_sig(new)
else:
if autogen_context.run_object_filters(
new.const, new.name, "unique_constraint", False, old.const
):
log.info(
"Detected changed unique constraint %r on %r: %s",
"Detected changed unique constraint '%s' on '%s':%s",
old.name,
tname,
msg,
", ".join(msg),
)
modify_ops.ops.append(
ops.DropConstraintOp.from_constraint(old.const)
@@ -775,24 +807,18 @@ def _compare_indexes_and_uniques(
modify_ops.ops.append(
ops.AddConstraintOp.from_constraint(new.const)
)
else:
assert False
for removed_name in sorted(set(conn_names).difference(metadata_names)):
conn_obj = conn_names[removed_name]
if (
is_uq_sig(conn_obj)
and conn_obj.unnamed in unnamed_metadata_uniques
):
conn_obj: Union[_ix_constraint_sig, _uq_constraint_sig] = conn_names[
removed_name
]
if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques:
continue
elif removed_name in doubled_constraints:
conn_uq, conn_idx = doubled_constraints[removed_name]
if (
all(
conn_idx.unnamed != meta_idx.unnamed
for meta_idx in metadata_indexes_sig
)
and conn_uq.unnamed not in metadata_uniques_by_sig
conn_idx.sig not in metadata_indexes_by_sig
and conn_uq.sig not in metadata_uniques_by_sig
):
obj_removed(conn_uq)
obj_removed(conn_idx)
@@ -804,36 +830,30 @@ def _compare_indexes_and_uniques(
if existing_name in doubled_constraints:
conn_uq, conn_idx = doubled_constraints[existing_name]
if is_index_sig(metadata_obj):
if metadata_obj.is_index:
conn_obj = conn_idx
else:
conn_obj = conn_uq
else:
conn_obj = conn_names[existing_name]
if type(conn_obj) != type(metadata_obj):
if conn_obj.is_index != metadata_obj.is_index:
obj_removed(conn_obj)
obj_added(metadata_obj)
else:
comparison = metadata_obj.compare_to_reflected(conn_obj)
msg = []
if conn_obj.is_unique != metadata_obj.is_unique:
msg.append(
" unique=%r to unique=%r"
% (conn_obj.is_unique, metadata_obj.is_unique)
)
if conn_obj.sig != metadata_obj.sig:
msg.append(
" expression %r to %r" % (conn_obj.sig, metadata_obj.sig)
)
if comparison.is_different:
# constraints are different
obj_changed(conn_obj, metadata_obj, comparison.message)
elif comparison.is_skip:
# constraint cannot be compared, skip it
thing = (
"index" if is_index_sig(conn_obj) else "unique constraint"
)
log.info(
"Cannot compare %s %r, assuming equal and skipping. %s",
thing,
conn_obj.name,
comparison.message,
)
else:
# constraints are equal
assert comparison.is_equal
if msg:
obj_changed(conn_obj, metadata_obj, msg)
for added_name in sorted(set(metadata_names).difference(conn_names)):
obj = metadata_names[added_name]
@@ -873,7 +893,7 @@ def _correct_for_uq_duplicates_uix(
}
unnamed_metadata_uqs = {
impl._create_metadata_constraint_sig(cons).unnamed
_uq_constraint_sig(cons, impl).sig
for name, cons in metadata_cons_names
if name is None
}
@@ -897,9 +917,7 @@ def _correct_for_uq_duplicates_uix(
for overlap in uqs_dupe_indexes:
if overlap not in metadata_uq_names:
if (
impl._create_reflected_constraint_sig(
uqs_dupe_indexes[overlap]
).unnamed
_uq_constraint_sig(uqs_dupe_indexes[overlap], impl).sig
not in unnamed_metadata_uqs
):
conn_unique_constraints.discard(uqs_dupe_indexes[overlap])
@@ -1035,7 +1053,7 @@ def _normalize_computed_default(sqltext: str) -> str:
"""
return re.sub(r"[ \(\)'\"`\[\]\t\r\n]", "", sqltext).lower()
return re.sub(r"[ \(\)'\"`\[\]]", "", sqltext).lower()
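Both regex variants above normalize a computed default by stripping quoting, brackets, and whitespace before comparison, so that a reflected expression and its metadata-side source compare loosely. A quick sketch of the effect using the older variant kept by this commit (input strings illustrative):

    import re

    def _normalize_computed_default(sqltext: str) -> str:
        # older variant: strips spaces, parens, quotes, backticks, brackets
        return re.sub(r"[ \(\)'\"`\[\]]", "", sqltext).lower()

    # a reflected PostgreSQL expression and its metadata-side source
    # normalize to the same token stream:
    assert _normalize_computed_default('("price" * 2)') == "price*2"
    assert _normalize_computed_default("(price * 2)") == "price*2"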
def _compare_computed_default(
@@ -1119,15 +1137,27 @@ def _compare_server_default(
return False
if sqla_compat._server_default_is_computed(metadata_default):
return _compare_computed_default( # type:ignore[func-returns-value]
autogen_context,
alter_column_op,
schema,
tname,
cname,
conn_col,
metadata_col,
)
# return False in case of a computed column as the server
# default. Note that DDL for adding or removing "GENERATED AS" from
# an existing column is not currently known for any backend.
# Once SQLAlchemy can reflect "GENERATED" as the "computed" element,
# we would also want to ignore and/or warn for changes vs. the
# metadata (or support backend specific DDL if applicable).
if not sqla_compat.has_computed_reflection:
return False
else:
return (
_compare_computed_default( # type:ignore[func-returns-value]
autogen_context,
alter_column_op,
schema,
tname,
cname,
conn_col,
metadata_col,
)
)
if sqla_compat._server_default_is_computed(conn_col_default):
_warn_computed_not_supported(tname, cname)
return False
@@ -1213,8 +1243,8 @@ def _compare_foreign_keys(
modify_table_ops: ModifyTableOps,
schema: Optional[str],
tname: Union[quoted_name, str],
conn_table: Table,
metadata_table: Table,
conn_table: Optional[Table],
metadata_table: Optional[Table],
) -> None:
# if we're doing CREATE TABLE, all FKs are created
# inline within the table def
@@ -1230,9 +1260,7 @@ def _compare_foreign_keys(
conn_fks_list = [
fk
for fk in _InspectorConv(inspector).get_foreign_keys(
tname, schema=schema
)
for fk in inspector.get_foreign_keys(tname, schema=schema)
if autogen_context.run_name_filters(
fk["name"],
"foreign_key_constraint",
@@ -1240,11 +1268,14 @@ def _compare_foreign_keys(
)
]
conn_fks = {
_make_foreign_key(const, conn_table) for const in conn_fks_list
}
backend_reflects_fk_options = bool(
conn_fks_list and "options" in conn_fks_list[0]
)
impl = autogen_context.migration_context.impl
conn_fks = {
_make_foreign_key(const, conn_table) # type: ignore[arg-type]
for const in conn_fks_list
}
# give the dialect a chance to correct the FKs to match more
# closely
@@ -1253,24 +1284,17 @@ def _compare_foreign_keys(
)
metadata_fks_sig = {
impl._create_metadata_constraint_sig(fk) for fk in metadata_fks
_fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
for fk in metadata_fks
}
conn_fks_sig = {
impl._create_reflected_constraint_sig(fk) for fk in conn_fks
_fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
for fk in conn_fks
}
# check if reflected FKs include options, indicating the backend
# can reflect FK options
if conn_fks_list and "options" in conn_fks_list[0]:
conn_fks_by_sig = {c.unnamed: c for c in conn_fks_sig}
metadata_fks_by_sig = {c.unnamed: c for c in metadata_fks_sig}
else:
# otherwise compare by sig without options added
conn_fks_by_sig = {c.unnamed_no_options: c for c in conn_fks_sig}
metadata_fks_by_sig = {
c.unnamed_no_options: c for c in metadata_fks_sig
}
conn_fks_by_sig = {c.sig: c for c in conn_fks_sig}
metadata_fks_by_sig = {c.sig: c for c in metadata_fks_sig}
metadata_fks_by_name = {
c.name: c for c in metadata_fks_sig if c.name is not None

View File

@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
from io import StringIO
@@ -18,9 +15,7 @@ from mako.pygen import PythonPrinter
from sqlalchemy import schema as sa_schema
from sqlalchemy import sql
from sqlalchemy import types as sqltypes
from sqlalchemy.sql.base import _DialectArgView
from sqlalchemy.sql.elements import conv
from sqlalchemy.sql.elements import Label
from sqlalchemy.sql.elements import quoted_name
from .. import util
@@ -30,8 +25,7 @@ from ..util import sqla_compat
if TYPE_CHECKING:
from typing import Literal
from sqlalchemy import Computed
from sqlalchemy import Identity
from sqlalchemy.sql.base import DialectKWArgs
from sqlalchemy.sql.elements import ColumnElement
from sqlalchemy.sql.elements import TextClause
from sqlalchemy.sql.schema import CheckConstraint
@@ -51,6 +45,8 @@ if TYPE_CHECKING:
from alembic.config import Config
from alembic.operations.ops import MigrationScript
from alembic.operations.ops import ModifyTableOps
from alembic.util.sqla_compat import Computed
from alembic.util.sqla_compat import Identity
MAX_PYTHON_ARGS = 255
@@ -168,31 +164,21 @@ def _render_modify_table(
def _render_create_table_comment(
autogen_context: AutogenContext, op: ops.CreateTableCommentOp
) -> str:
if autogen_context._has_batch:
templ = (
"{prefix}create_table_comment(\n"
"{indent}{comment},\n"
"{indent}existing_comment={existing}\n"
")"
)
else:
templ = (
"{prefix}create_table_comment(\n"
"{indent}'{tname}',\n"
"{indent}{comment},\n"
"{indent}existing_comment={existing},\n"
"{indent}schema={schema}\n"
")"
)
templ = (
"{prefix}create_table_comment(\n"
"{indent}'{tname}',\n"
"{indent}{comment},\n"
"{indent}existing_comment={existing},\n"
"{indent}schema={schema}\n"
")"
)
return templ.format(
prefix=_alembic_autogenerate_prefix(autogen_context),
tname=op.table_name,
comment="%r" % op.comment if op.comment is not None else None,
existing=(
"%r" % op.existing_comment
if op.existing_comment is not None
else None
),
existing="%r" % op.existing_comment
if op.existing_comment is not None
else None,
schema="'%s'" % op.schema if op.schema is not None else None,
indent=" ",
)
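For orientation, the single non-batch template kept above renders an autogenerate line of roughly this shape (table and comment values illustrative):

    op.create_table_comment(
        'food_items',
        'Nutrition reference data',
        existing_comment=None,
        schema=None
    )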
@@ -202,28 +188,19 @@ def _render_create_table_comment(
def _render_drop_table_comment(
autogen_context: AutogenContext, op: ops.DropTableCommentOp
) -> str:
if autogen_context._has_batch:
templ = (
"{prefix}drop_table_comment(\n"
"{indent}existing_comment={existing}\n"
")"
)
else:
templ = (
"{prefix}drop_table_comment(\n"
"{indent}'{tname}',\n"
"{indent}existing_comment={existing},\n"
"{indent}schema={schema}\n"
")"
)
templ = (
"{prefix}drop_table_comment(\n"
"{indent}'{tname}',\n"
"{indent}existing_comment={existing},\n"
"{indent}schema={schema}\n"
")"
)
return templ.format(
prefix=_alembic_autogenerate_prefix(autogen_context),
tname=op.table_name,
existing=(
"%r" % op.existing_comment
if op.existing_comment is not None
else None
),
existing="%r" % op.existing_comment
if op.existing_comment is not None
else None,
schema="'%s'" % op.schema if op.schema is not None else None,
indent=" ",
)
@@ -280,9 +257,6 @@ def _add_table(autogen_context: AutogenContext, op: ops.CreateTableOp) -> str:
prefixes = ", ".join("'%s'" % p for p in table._prefixes)
text += ",\nprefixes=[%s]" % prefixes
if op.if_not_exists is not None:
text += ",\nif_not_exists=%r" % bool(op.if_not_exists)
text += "\n)"
return text
@@ -295,20 +269,16 @@ def _drop_table(autogen_context: AutogenContext, op: ops.DropTableOp) -> str:
}
if op.schema:
text += ", schema=%r" % _ident(op.schema)
if op.if_exists is not None:
text += ", if_exists=%r" % bool(op.if_exists)
text += ")"
return text
def _render_dialect_kwargs_items(
autogen_context: AutogenContext, dialect_kwargs: _DialectArgView
autogen_context: AutogenContext, item: DialectKWArgs
) -> list[str]:
return [
f"{key}={_render_potential_expr(val, autogen_context)}"
for key, val in dialect_kwargs.items()
for key, val in item.dialect_kwargs.items()
]
@@ -331,9 +301,7 @@ def _add_index(autogen_context: AutogenContext, op: ops.CreateIndexOp) -> str:
assert index.table is not None
opts = _render_dialect_kwargs_items(autogen_context, index.dialect_kwargs)
if op.if_not_exists is not None:
opts.append("if_not_exists=%r" % bool(op.if_not_exists))
opts = _render_dialect_kwargs_items(autogen_context, index)
text = tmpl % {
"prefix": _alembic_autogenerate_prefix(autogen_context),
"name": _render_gen_name(autogen_context, index.name),
@@ -342,11 +310,9 @@ def _add_index(autogen_context: AutogenContext, op: ops.CreateIndexOp) -> str:
_get_index_rendered_expressions(index, autogen_context)
),
"unique": index.unique or False,
"schema": (
(", schema=%r" % _ident(index.table.schema))
if index.table.schema
else ""
),
"schema": (", schema=%r" % _ident(index.table.schema))
if index.table.schema
else "",
"kwargs": ", " + ", ".join(opts) if opts else "",
}
return text
@@ -365,9 +331,7 @@ def _drop_index(autogen_context: AutogenContext, op: ops.DropIndexOp) -> str:
"%(prefix)sdrop_index(%(name)r, "
"table_name=%(table_name)r%(schema)s%(kwargs)s)"
)
opts = _render_dialect_kwargs_items(autogen_context, index.dialect_kwargs)
if op.if_exists is not None:
opts.append("if_exists=%r" % bool(op.if_exists))
opts = _render_dialect_kwargs_items(autogen_context, index)
text = tmpl % {
"prefix": _alembic_autogenerate_prefix(autogen_context),
"name": _render_gen_name(autogen_context, op.index_name),
@@ -389,7 +353,6 @@ def _add_unique_constraint(
def _add_fk_constraint(
autogen_context: AutogenContext, op: ops.CreateForeignKeyOp
) -> str:
constraint = op.to_constraint()
args = [repr(_render_gen_name(autogen_context, op.constraint_name))]
if not autogen_context._has_batch:
args.append(repr(_ident(op.source_table)))
@@ -419,16 +382,9 @@ def _add_fk_constraint(
if value is not None:
args.append("%s=%r" % (k, value))
dialect_kwargs = _render_dialect_kwargs_items(
autogen_context, constraint.dialect_kwargs
)
return "%(prefix)screate_foreign_key(%(args)s%(dialect_kwargs)s)" % {
return "%(prefix)screate_foreign_key(%(args)s)" % {
"prefix": _alembic_autogenerate_prefix(autogen_context),
"args": ", ".join(args),
"dialect_kwargs": (
", " + ", ".join(dialect_kwargs) if dialect_kwargs else ""
),
}
@@ -450,7 +406,7 @@ def _drop_constraint(
name = _render_gen_name(autogen_context, op.constraint_name)
schema = _ident(op.schema) if op.schema else None
type_ = _ident(op.constraint_type) if op.constraint_type else None
if_exists = op.if_exists
params_strs = []
params_strs.append(repr(name))
if not autogen_context._has_batch:
@@ -459,47 +415,32 @@ def _drop_constraint(
params_strs.append(f"schema={schema!r}")
if type_ is not None:
params_strs.append(f"type_={type_!r}")
if if_exists is not None:
params_strs.append(f"if_exists={if_exists}")
return f"{prefix}drop_constraint({', '.join(params_strs)})"
@renderers.dispatch_for(ops.AddColumnOp)
def _add_column(autogen_context: AutogenContext, op: ops.AddColumnOp) -> str:
schema, tname, column, if_not_exists = (
op.schema,
op.table_name,
op.column,
op.if_not_exists,
)
schema, tname, column = op.schema, op.table_name, op.column
if autogen_context._has_batch:
template = "%(prefix)sadd_column(%(column)s)"
else:
template = "%(prefix)sadd_column(%(tname)r, %(column)s"
if schema:
template += ", schema=%(schema)r"
if if_not_exists is not None:
template += ", if_not_exists=%(if_not_exists)r"
template += ")"
text = template % {
"prefix": _alembic_autogenerate_prefix(autogen_context),
"tname": tname,
"column": _render_column(column, autogen_context),
"schema": schema,
"if_not_exists": if_not_exists,
}
return text
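The `if_not_exists` rendering removed above corresponds to a flag on the newer Alembic `op.add_column()`; a minimal sketch of the migration code it renders, with hypothetical table and column names:

    from alembic import op
    import sqlalchemy as sa

    def upgrade():
        # emits ADD COLUMN IF NOT EXISTS on backends that support it
        op.add_column(
            "food_items",
            sa.Column("notes", sa.Text(), nullable=True),
            if_not_exists=True,
        )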
@renderers.dispatch_for(ops.DropColumnOp)
def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str:
schema, tname, column_name, if_exists = (
op.schema,
op.table_name,
op.column_name,
op.if_exists,
)
schema, tname, column_name = op.schema, op.table_name, op.column_name
if autogen_context._has_batch:
template = "%(prefix)sdrop_column(%(cname)r)"
@@ -507,8 +448,6 @@ def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str:
template = "%(prefix)sdrop_column(%(tname)r, %(cname)r"
if schema:
template += ", schema=%(schema)r"
if if_exists is not None:
template += ", if_exists=%(if_exists)r"
template += ")"
text = template % {
@@ -516,7 +455,6 @@ def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str:
"tname": _ident(tname),
"cname": _ident(column_name),
"schema": _ident(schema),
"if_exists": if_exists,
}
return text
@@ -531,7 +469,6 @@ def _alter_column(
type_ = op.modify_type
nullable = op.modify_nullable
comment = op.modify_comment
newname = op.modify_name
autoincrement = op.kw.get("autoincrement", None)
existing_type = op.existing_type
existing_nullable = op.existing_nullable
@@ -560,8 +497,6 @@ def _alter_column(
rendered = _render_server_default(server_default, autogen_context)
text += ",\n%sserver_default=%s" % (indent, rendered)
if newname is not None:
text += ",\n%snew_column_name=%r" % (indent, newname)
if type_ is not None:
text += ",\n%stype_=%s" % (indent, _repr_type(type_, autogen_context))
if nullable is not None:
@@ -614,28 +549,23 @@ def _render_potential_expr(
value: Any,
autogen_context: AutogenContext,
*,
wrap_in_element: bool = True,
wrap_in_text: bool = True,
is_server_default: bool = False,
is_index: bool = False,
) -> str:
if isinstance(value, sql.ClauseElement):
sql_text = autogen_context.migration_context.impl.render_ddl_sql_expr(
value, is_server_default=is_server_default, is_index=is_index
)
if wrap_in_element:
prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
element = "literal_column" if is_index else "text"
value_str = f"{prefix}{element}({sql_text!r})"
if (
is_index
and isinstance(value, Label)
and type(value.name) is str
):
return value_str + f".label({value.name!r})"
else:
return value_str
if wrap_in_text:
template = "%(prefix)stext(%(sql)r)"
else:
return repr(sql_text)
template = "%(sql)r"
return template % {
"prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
"sql": autogen_context.migration_context.impl.render_ddl_sql_expr(
value, is_server_default=is_server_default, is_index=is_index
),
}
else:
return repr(value)
@@ -644,11 +574,9 @@ def _get_index_rendered_expressions(
idx: Index, autogen_context: AutogenContext
) -> List[str]:
return [
(
repr(_ident(getattr(exp, "name", None)))
if isinstance(exp, sa_schema.Column)
else _render_potential_expr(exp, autogen_context, is_index=True)
)
repr(_ident(getattr(exp, "name", None)))
if isinstance(exp, sa_schema.Column)
else _render_potential_expr(exp, autogen_context, is_index=True)
for exp in idx.expressions
]
@@ -663,18 +591,16 @@ def _uq_constraint(
has_batch = autogen_context._has_batch
if constraint.deferrable:
opts.append(("deferrable", constraint.deferrable))
opts.append(("deferrable", str(constraint.deferrable)))
if constraint.initially:
opts.append(("initially", constraint.initially))
opts.append(("initially", str(constraint.initially)))
if not has_batch and alter and constraint.table.schema:
opts.append(("schema", _ident(constraint.table.schema)))
if not alter and constraint.name:
opts.append(
("name", _render_gen_name(autogen_context, constraint.name))
)
dialect_options = _render_dialect_kwargs_items(
autogen_context, constraint.dialect_kwargs
)
dialect_options = _render_dialect_kwargs_items(autogen_context, constraint)
if alter:
args = [repr(_render_gen_name(autogen_context, constraint.name))]
@@ -778,7 +704,7 @@ def _render_column(
+ [
"%s=%s"
% (key, _render_potential_expr(val, autogen_context))
for key, val in column.kwargs.items()
for key, val in sqla_compat._column_kwargs(column).items()
]
)
),
@@ -813,8 +739,6 @@ def _render_server_default(
return _render_potential_expr(
default.arg, autogen_context, is_server_default=True
)
elif isinstance(default, sa_schema.FetchedValue):
return _render_fetched_value(autogen_context)
if isinstance(default, str) and repr_:
default = repr(re.sub(r"^'|'$", "", default))
@@ -826,7 +750,7 @@ def _render_computed(
computed: Computed, autogen_context: AutogenContext
) -> str:
text = _render_potential_expr(
computed.sqltext, autogen_context, wrap_in_element=False
computed.sqltext, autogen_context, wrap_in_text=False
)
kwargs = {}
@@ -852,12 +776,6 @@ def _render_identity(
}
def _render_fetched_value(autogen_context: AutogenContext) -> str:
return "%(prefix)sFetchedValue()" % {
"prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
}
def _repr_type(
type_: TypeEngine,
autogen_context: AutogenContext,
@@ -876,10 +794,7 @@ def _repr_type(
mod = type(type_).__module__
imports = autogen_context.imports
if not _skip_variants and sqla_compat._type_has_variants(type_):
return _render_Variant_type(type_, autogen_context)
elif mod.startswith("sqlalchemy.dialects"):
if mod.startswith("sqlalchemy.dialects"):
match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod)
assert match is not None
dname = match.group(1)
@@ -891,6 +806,8 @@ def _repr_type(
return "%s.%r" % (dname, type_)
elif impl_rt:
return impl_rt
elif not _skip_variants and sqla_compat._type_has_variants(type_):
return _render_Variant_type(type_, autogen_context)
elif mod.startswith("sqlalchemy."):
if "_render_%s_type" % type_.__visit_name__ in globals():
fn = globals()["_render_%s_type" % type_.__visit_name__]
@@ -917,7 +834,7 @@ def _render_Variant_type(
) -> str:
base_type, variant_mapping = sqla_compat._get_variant_mapping(type_)
base = _repr_type(base_type, autogen_context, _skip_variants=True)
assert base is not None and base is not False # type: ignore[comparison-overlap] # noqa:E501
assert base is not None and base is not False
for dialect in sorted(variant_mapping):
typ = variant_mapping[dialect]
base += ".with_variant(%s, %r)" % (
@@ -1008,13 +925,13 @@ def _render_primary_key(
def _fk_colspec(
fk: ForeignKey,
metadata_schema: Optional[str],
namespace_metadata: Optional[MetaData],
namespace_metadata: MetaData,
) -> str:
"""Implement a 'safe' version of ForeignKey._get_colspec() that
won't fail if the remote table can't be resolved.
"""
colspec = fk._get_colspec()
colspec = fk._get_colspec() # type:ignore[attr-defined]
tokens = colspec.split(".")
tname, colname = tokens[-2:]
@@ -1032,10 +949,7 @@ def _fk_colspec(
# the FK constraint needs to be rendered in terms of the column
# name.
if (
namespace_metadata is not None
and table_fullname in namespace_metadata.tables
):
if table_fullname in namespace_metadata.tables:
col = namespace_metadata.tables[table_fullname].c.get(colname)
if col is not None:
colname = _ident(col.name) # type: ignore[assignment]
@@ -1066,7 +980,7 @@ def _populate_render_fk_opts(
def _render_foreign_key(
constraint: ForeignKeyConstraint,
autogen_context: AutogenContext,
namespace_metadata: Optional[MetaData],
namespace_metadata: MetaData,
) -> Optional[str]:
rendered = _user_defined_render("foreign_key", constraint, autogen_context)
if rendered is not False:
@@ -1080,16 +994,15 @@ def _render_foreign_key(
_populate_render_fk_opts(constraint, opts)
apply_metadata_schema = (
namespace_metadata.schema if namespace_metadata is not None else None
)
apply_metadata_schema = namespace_metadata.schema
return (
"%(prefix)sForeignKeyConstraint([%(cols)s], "
"[%(refcols)s], %(args)s)"
% {
"prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
"cols": ", ".join(
repr(_ident(f.parent.name)) for f in constraint.elements
"%r" % _ident(cast("Column", f.parent).name)
for f in constraint.elements
),
"refcols": ", ".join(
repr(_fk_colspec(f, apply_metadata_schema, namespace_metadata))
@@ -1130,10 +1043,12 @@ def _render_check_constraint(
# ideally SQLAlchemy would give us more of a first class
# way to detect this.
if (
constraint._create_rule
and hasattr(constraint._create_rule, "target")
constraint._create_rule # type:ignore[attr-defined]
and hasattr(
constraint._create_rule, "target" # type:ignore[attr-defined]
)
and isinstance(
constraint._create_rule.target,
constraint._create_rule.target, # type:ignore[attr-defined]
sqltypes.TypeEngine,
)
):
@@ -1145,13 +1060,11 @@ def _render_check_constraint(
)
return "%(prefix)sCheckConstraint(%(sqltext)s%(opts)s)" % {
"prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
"opts": (
", " + (", ".join("%s=%s" % (k, v) for k, v in opts))
if opts
else ""
),
"opts": ", " + (", ".join("%s=%s" % (k, v) for k, v in opts))
if opts
else "",
"sqltext": _render_potential_expr(
constraint.sqltext, autogen_context, wrap_in_element=False
constraint.sqltext, autogen_context, wrap_in_text=False
),
}
@@ -1163,10 +1076,7 @@ def _execute_sql(autogen_context: AutogenContext, op: ops.ExecuteSQLOp) -> str:
"Autogenerate rendering of SQL Expression language constructs "
"not supported here; please use a plain SQL string"
)
return "{prefix}execute({sqltext!r})".format(
prefix=_alembic_autogenerate_prefix(autogen_context),
sqltext=op.sqltext,
)
return "op.execute(%r)" % op.sqltext
renderers = default_renderers.branch()
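As the `_execute_sql` renderer above notes, autogenerate can only render plain SQL strings for `op.execute()`; expression-language constructs raise. A short sketch of the distinction (table name hypothetical):

    from alembic import op

    # renders cleanly: a plain SQL string
    op.execute("UPDATE food_items SET is_verified = false")

    # an Executable such as a sqlalchemy update() construct is rejected
    # with NotImplementedError at render time and must be rewritten
    # as a string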

View File

@@ -4,7 +4,7 @@ from typing import Any
from typing import Callable
from typing import Iterator
from typing import List
from typing import Tuple
from typing import Optional
from typing import Type
from typing import TYPE_CHECKING
from typing import Union
@@ -16,18 +16,12 @@ if TYPE_CHECKING:
from ..operations.ops import AddColumnOp
from ..operations.ops import AlterColumnOp
from ..operations.ops import CreateTableOp
from ..operations.ops import DowngradeOps
from ..operations.ops import MigrateOperation
from ..operations.ops import MigrationScript
from ..operations.ops import ModifyTableOps
from ..operations.ops import OpContainer
from ..operations.ops import UpgradeOps
from ..runtime.environment import _GetRevArg
from ..runtime.migration import MigrationContext
from ..script.revision import _GetRevArg
ProcessRevisionDirectiveFn = Callable[
["MigrationContext", "_GetRevArg", List["MigrationScript"]], None
]
class Rewriter:
@@ -58,21 +52,15 @@ class Rewriter:
_traverse = util.Dispatcher()
_chained: Tuple[Union[ProcessRevisionDirectiveFn, Rewriter], ...] = ()
_chained: Optional[Rewriter] = None
def __init__(self) -> None:
self.dispatch = util.Dispatcher()
def chain(
self,
other: Union[
ProcessRevisionDirectiveFn,
Rewriter,
],
) -> Rewriter:
def chain(self, other: Rewriter) -> Rewriter:
"""Produce a "chain" of this :class:`.Rewriter` to another.
This allows two or more rewriters to operate serially on a stream,
This allows two rewriters to operate serially on a stream,
e.g.::
writer1 = autogenerate.Rewriter()
@@ -101,7 +89,7 @@ class Rewriter:
"""
wr = self.__class__.__new__(self.__class__)
wr.__dict__.update(self.__dict__)
wr._chained += (other,)
wr._chained = other
return wr
def rewrites(
@@ -113,7 +101,7 @@ class Rewriter:
Type[CreateTableOp],
Type[ModifyTableOps],
],
) -> Callable[..., Any]:
) -> Callable:
"""Register a function as rewriter for a given type.
The function should receive three arguments, which are
@@ -158,8 +146,8 @@ class Rewriter:
directives: List[MigrationScript],
) -> None:
self.process_revision_directives(context, revision, directives)
for process_revision_directives in self._chained:
process_revision_directives(context, revision, directives)
if self._chained:
self._chained(context, revision, directives)
@_traverse.dispatch_for(ops.MigrationScript)
def _traverse_script(
@@ -168,7 +156,7 @@ class Rewriter:
revision: _GetRevArg,
directive: MigrationScript,
) -> None:
upgrade_ops_list: List[UpgradeOps] = []
upgrade_ops_list = []
for upgrade_ops in directive.upgrade_ops_list:
ret = self._traverse_for(context, revision, upgrade_ops)
if len(ret) != 1:
@@ -176,10 +164,9 @@ class Rewriter:
"Can only return single object for UpgradeOps traverse"
)
upgrade_ops_list.append(ret[0])
directive.upgrade_ops = upgrade_ops_list
downgrade_ops_list: List[DowngradeOps] = []
downgrade_ops_list = []
for downgrade_ops in directive.downgrade_ops_list:
ret = self._traverse_for(context, revision, downgrade_ops)
if len(ret) != 1:
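The `Rewriter.chain()` docstring above describes the documented way to compose hooks; a minimal sketch of two chained rewriters (the hook bodies are illustrative):

    from alembic import autogenerate
    from alembic.operations import ops

    writer1 = autogenerate.Rewriter()
    writer2 = autogenerate.Rewriter()

    @writer1.rewrites(ops.AddColumnOp)
    def force_nullable(context, revision, op):
        # illustrative tweak: new columns start out nullable
        op.column.nullable = True
        return op

    @writer2.rewrites(ops.AddColumnOp)
    def log_column(context, revision, op):
        print(f"adding column {op.column.name!r}")
        return op

    # writer2 receives the directive stream after writer1 has run;
    # the result is passed as process_revision_directives
    writer = writer1.chain(writer2)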

View File

@@ -1,9 +1,6 @@
# mypy: allow-untyped-defs, allow-untyped-calls
from __future__ import annotations
import os
import pathlib
from typing import List
from typing import Optional
from typing import TYPE_CHECKING
@@ -13,7 +10,6 @@ from . import autogenerate as autogen
from . import util
from .runtime.environment import EnvironmentContext
from .script import ScriptDirectory
from .util import compat
if TYPE_CHECKING:
from alembic.config import Config
@@ -22,7 +18,7 @@ if TYPE_CHECKING:
from .runtime.environment import ProcessRevisionDirectiveFn
def list_templates(config: Config) -> None:
def list_templates(config: Config):
"""List available templates.
:param config: a :class:`.Config` object.
@@ -30,10 +26,12 @@ def list_templates(config: Config) -> None:
"""
config.print_stdout("Available templates:\n")
for tempname in config._get_template_path().iterdir():
with (tempname / "README").open() as readme:
for tempname in os.listdir(config.get_template_directory()):
with open(
os.path.join(config.get_template_directory(), tempname, "README")
) as readme:
synopsis = next(readme).rstrip()
config.print_stdout("%s - %s", tempname.name, synopsis)
config.print_stdout("%s - %s", tempname, synopsis)
config.print_stdout("\nTemplates are used via the 'init' command, e.g.:")
config.print_stdout("\n alembic init --template generic ./scripts")
@@ -49,7 +47,7 @@ def init(
:param config: a :class:`.Config` object.
:param directory: string path of the target directory.
:param directory: string path of the target directory
:param template: string name of the migration environment template to
use.
@@ -59,136 +57,65 @@ def init(
"""
directory_path = pathlib.Path(directory)
if directory_path.exists() and list(directory_path.iterdir()):
if os.access(directory, os.F_OK) and os.listdir(directory):
raise util.CommandError(
"Directory %s already exists and is not empty" % directory_path
"Directory %s already exists and is not empty" % directory
)
template_path = config._get_template_path() / template
template_dir = os.path.join(config.get_template_directory(), template)
if not os.access(template_dir, os.F_OK):
raise util.CommandError("No such template %r" % template)
if not template_path.exists():
raise util.CommandError(f"No such template {template_path}")
# left as os.access() to suit unit test mocking
if not os.access(directory_path, os.F_OK):
if not os.access(directory, os.F_OK):
with util.status(
f"Creating directory {directory_path.absolute()}",
f"Creating directory {os.path.abspath(directory)!r}",
**config.messaging_opts,
):
os.makedirs(directory_path)
os.makedirs(directory)
versions = directory_path / "versions"
versions = os.path.join(directory, "versions")
with util.status(
f"Creating directory {versions.absolute()}",
f"Creating directory {os.path.abspath(versions)!r}",
**config.messaging_opts,
):
os.makedirs(versions)
if not directory_path.is_absolute():
# for non-absolute path, state config file in .ini / pyproject
# as relative to the %(here)s token, which is where the config
# file itself would be
script = ScriptDirectory(directory)
if config._config_file_path is not None:
rel_dir = compat.path_relative_to(
directory_path.absolute(),
config._config_file_path.absolute().parent,
walk_up=True,
)
ini_script_location_directory = ("%(here)s" / rel_dir).as_posix()
if config._toml_file_path is not None:
rel_dir = compat.path_relative_to(
directory_path.absolute(),
config._toml_file_path.absolute().parent,
walk_up=True,
)
toml_script_location_directory = ("%(here)s" / rel_dir).as_posix()
else:
ini_script_location_directory = directory_path.as_posix()
toml_script_location_directory = directory_path.as_posix()
script = ScriptDirectory(directory_path)
has_toml = False
config_file: pathlib.Path | None = None
for file_path in template_path.iterdir():
file_ = file_path.name
config_file: str | None = None
for file_ in os.listdir(template_dir):
file_path = os.path.join(template_dir, file_)
if file_ == "alembic.ini.mako":
assert config.config_file_name is not None
config_file = pathlib.Path(config.config_file_name).absolute()
if config_file.exists():
config_file = os.path.abspath(config.config_file_name)
if os.access(config_file, os.F_OK):
util.msg(
f"File {config_file} already exists, skipping",
f"File {config_file!r} already exists, skipping",
**config.messaging_opts,
)
else:
script._generate_template(
file_path,
config_file,
script_location=ini_script_location_directory,
file_path, config_file, script_location=directory
)
elif file_ == "pyproject.toml.mako":
has_toml = True
assert config._toml_file_path is not None
toml_path = config._toml_file_path.absolute()
if toml_path.exists():
# left as open() to suit unit test mocking
with open(toml_path, "rb") as f:
toml_data = compat.tomllib.load(f)
if "tool" in toml_data and "alembic" in toml_data["tool"]:
util.msg(
f"File {toml_path} already exists "
"and already has a [tool.alembic] section, "
"skipping",
)
continue
script._append_template(
file_path,
toml_path,
script_location=toml_script_location_directory,
)
else:
script._generate_template(
file_path,
toml_path,
script_location=toml_script_location_directory,
)
elif file_path.is_file():
output_file = directory_path / file_
elif os.path.isfile(file_path):
output_file = os.path.join(directory, file_)
script._copy_file(file_path, output_file)
if package:
for path in [
directory_path.absolute() / "__init__.py",
versions.absolute() / "__init__.py",
os.path.join(os.path.abspath(directory), "__init__.py"),
os.path.join(os.path.abspath(versions), "__init__.py"),
]:
with util.status(f"Adding {path!s}", **config.messaging_opts):
# left as open() to suit unit test mocking
with util.status(f"Adding {path!r}", **config.messaging_opts):
with open(path, "w"):
pass
assert config_file is not None
if has_toml:
util.msg(
f"Please edit configuration settings in {toml_path} and "
"configuration/connection/logging "
f"settings in {config_file} before proceeding.",
**config.messaging_opts,
)
else:
util.msg(
"Please edit configuration/connection/logging "
f"settings in {config_file} before proceeding.",
**config.messaging_opts,
)
util.msg(
"Please edit configuration/connection/logging "
f"settings in {config_file!r} before proceeding.",
**config.messaging_opts,
)
def revision(
@@ -199,7 +126,7 @@ def revision(
head: str = "head",
splice: bool = False,
branch_label: Optional[_RevIdType] = None,
version_path: Union[str, os.PathLike[str], None] = None,
version_path: Optional[str] = None,
rev_id: Optional[str] = None,
depends_on: Optional[str] = None,
process_revision_directives: Optional[ProcessRevisionDirectiveFn] = None,
@@ -245,7 +172,7 @@ def revision(
will be applied to the structure generated by the revision process
where it can be altered programmatically. Note that unlike all
the other parameters, this option is only available via programmatic
use of :func:`.command.revision`.
use of :func:`.command.revision`
"""
@@ -269,9 +196,7 @@ def revision(
process_revision_directives=process_revision_directives,
)
environment = util.asbool(
config.get_alembic_option("revision_environment")
)
environment = util.asbool(config.get_main_option("revision_environment"))
if autogenerate:
environment = True
@@ -365,15 +290,10 @@ def check(config: "Config") -> None:
# the revision_context now has MigrationScript structure(s) present.
migration_script = revision_context.generated_revisions[-1]
diffs = []
for upgrade_ops in migration_script.upgrade_ops_list:
diffs.extend(upgrade_ops.as_diffs())
diffs = migration_script.upgrade_ops.as_diffs()
if diffs:
raise util.AutogenerateDiffsDetected(
f"New upgrade operations detected: {diffs}",
revision_context=revision_context,
diffs=diffs,
f"New upgrade operations detected: {diffs}"
)
else:
config.print_stdout("No new upgrade operations detected.")
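`check()` raises rather than returning a status, so in a CI script it is typically wrapped; a sketch, assuming an `alembic.ini` in the working directory:

    from alembic import command, util
    from alembic.config import Config

    cfg = Config("alembic.ini")  # path assumed
    try:
        command.check(cfg)
    except util.AutogenerateDiffsDetected as exc:
        # models and database schema have drifted; fail the build
        raise SystemExit(f"schema drift detected: {exc}")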
@@ -390,11 +310,9 @@ def merge(
:param config: a :class:`.Config` instance
:param revisions: The revisions to merge.
:param message: string message to apply to the revision
:param message: string message to apply to the revision.
:param branch_label: string label name to apply to the new revision.
:param branch_label: string label name to apply to the new revision
:param rev_id: hardcoded revision identifier instead of generating a new
one.
@@ -411,9 +329,7 @@ def merge(
# e.g. multiple databases
}
environment = util.asbool(
config.get_alembic_option("revision_environment")
)
environment = util.asbool(config.get_main_option("revision_environment"))
if environment:
@@ -449,10 +365,9 @@ def upgrade(
:param config: a :class:`.Config` instance.
:param revision: string revision target or range for --sql mode. May be
``"heads"`` to target the most recent revision(s).
:param revision: string revision target or range for --sql mode
:param sql: if True, use ``--sql`` mode.
:param sql: if True, use ``--sql`` mode
:param tag: an arbitrary "tag" that can be intercepted by custom
``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument`
@@ -493,10 +408,9 @@ def downgrade(
:param config: a :class:`.Config` instance.
:param revision: string revision target or range for --sql mode. May
be ``"base"`` to target the first revision.
:param revision: string revision target or range for --sql mode
:param sql: if True, use ``--sql`` mode.
:param sql: if True, use ``--sql`` mode
:param tag: an arbitrary "tag" that can be intercepted by custom
``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument`
@@ -530,13 +444,12 @@ def downgrade(
script.run_env()
def show(config: Config, rev: str) -> None:
def show(config, rev):
"""Show the revision(s) denoted by the given symbol.
:param config: a :class:`.Config` instance.
:param rev: string revision target. May be ``"current"`` to show the
revision(s) currently applied in the database.
:param revision: string revision target
"""
@@ -566,7 +479,7 @@ def history(
:param config: a :class:`.Config` instance.
:param rev_range: string revision range.
:param rev_range: string revision range
:param verbose: output in verbose mode.
@@ -586,7 +499,7 @@ def history(
base = head = None
environment = (
util.asbool(config.get_alembic_option("revision_environment"))
util.asbool(config.get_main_option("revision_environment"))
or indicate_current
)
@@ -625,9 +538,7 @@ def history(
_display_history(config, script, base, head)
def heads(
config: Config, verbose: bool = False, resolve_dependencies: bool = False
) -> None:
def heads(config, verbose=False, resolve_dependencies=False):
"""Show current available heads in the script directory.
:param config: a :class:`.Config` instance.
@@ -652,7 +563,7 @@ def heads(
)
def branches(config: Config, verbose: bool = False) -> None:
def branches(config, verbose=False):
"""Show current branch points.
:param config: a :class:`.Config` instance.
@@ -722,9 +633,7 @@ def stamp(
:param config: a :class:`.Config` instance.
:param revision: target revision or list of revisions. May be a list
to indicate stamping of multiple branch heads; may be ``"base"``
to remove all revisions from the table or ``"heads"`` to stamp the
most recent revision(s).
to indicate stamping of multiple branch heads.
.. note:: this parameter is called "revisions" in the command line
interface.
@@ -814,7 +723,7 @@ def ensure_version(config: Config, sql: bool = False) -> None:
:param config: a :class:`.Config` instance.
:param sql: use ``--sql`` mode.
:param sql: use ``--sql`` mode
.. versionadded:: 1.7.6
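Every command in this module takes a `Config` as its first argument, so the CLI surface is equally usable programmatically; a brief sketch (paths and message illustrative):

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")  # path assumed
    command.upgrade(cfg, "head")
    command.revision(cfg, message="sync models", autogenerate=True)
    command.history(cfg, verbose=True)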

File diff suppressed because it is too large

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from typing import Any
from typing import Callable
from typing import Collection
from typing import ContextManager
from typing import Dict
from typing import Iterable
from typing import List
@@ -13,14 +14,11 @@ from typing import Mapping
from typing import MutableMapping
from typing import Optional
from typing import overload
from typing import Sequence
from typing import TextIO
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union
from typing_extensions import ContextManager
if TYPE_CHECKING:
from sqlalchemy.engine.base import Connection
from sqlalchemy.engine.url import URL
@@ -41,9 +39,7 @@ if TYPE_CHECKING:
### end imports ###
def begin_transaction() -> (
Union[_ProxyTransaction, ContextManager[None, Optional[bool]]]
):
def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]:
"""Return a context manager that will
enclose an operation within a "transaction",
as defined by the environment's offline
@@ -101,7 +97,7 @@ def configure(
tag: Optional[str] = None,
template_args: Optional[Dict[str, Any]] = None,
render_as_batch: bool = False,
target_metadata: Union[MetaData, Sequence[MetaData], None] = None,
target_metadata: Optional[MetaData] = None,
include_name: Optional[
Callable[
[
@@ -163,8 +159,8 @@ def configure(
MigrationContext,
Column[Any],
Column[Any],
TypeEngine[Any],
TypeEngine[Any],
TypeEngine,
TypeEngine,
],
Optional[bool],
],
@@ -639,8 +635,7 @@ def configure(
"""
def execute(
sql: Union[Executable, str],
execution_options: Optional[Dict[str, Any]] = None,
sql: Union[Executable, str], execution_options: Optional[dict] = None
) -> None:
"""Execute the given SQL using the current change context.
@@ -763,11 +758,7 @@ def get_x_argument(
The return value is a list, returned directly from the ``argparse``
structure. If ``as_dictionary=True`` is passed, the ``x`` arguments
are parsed using ``key=value`` format into a dictionary that is
then returned. If there is no ``=`` in the argument, value is an empty
string.
.. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when
arguments are passed without the ``=`` symbol.
then returned.
For example, to support passing a database URL on the command line,
the standard ``env.py`` script can be modified like this::
@@ -809,7 +800,7 @@ def is_offline_mode() -> bool:
"""
def is_transactional_ddl() -> bool:
def is_transactional_ddl():
"""Return True if the context is configured to expect a
transactional DDL capable backend.
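The `get_x_argument` docstring above is cut off before its example; the standard `env.py` pattern it describes looks like this (key name illustrative):

    # env.py, invoked as: alembic -x db_url=postgresql://... upgrade head
    from alembic import context

    config = context.config
    db_url = context.get_x_argument(as_dictionary=True).get("db_url")
    if db_url:
        config.set_main_option("sqlalchemy.url", db_url)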

View File

@@ -3,4 +3,4 @@ from . import mysql
from . import oracle
from . import postgresql
from . import sqlite
from .impl import DefaultImpl as DefaultImpl
from .impl import DefaultImpl

View File

@@ -1,329 +0,0 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
from typing import Any
from typing import ClassVar
from typing import Dict
from typing import Generic
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union
from sqlalchemy.sql.schema import Constraint
from sqlalchemy.sql.schema import ForeignKeyConstraint
from sqlalchemy.sql.schema import Index
from sqlalchemy.sql.schema import UniqueConstraint
from typing_extensions import TypeGuard
from .. import util
from ..util import sqla_compat
if TYPE_CHECKING:
from typing import Literal
from alembic.autogenerate.api import AutogenContext
from alembic.ddl.impl import DefaultImpl
CompareConstraintType = Union[Constraint, Index]
_C = TypeVar("_C", bound=CompareConstraintType)
_clsreg: Dict[str, Type[_constraint_sig]] = {}
class ComparisonResult(NamedTuple):
status: Literal["equal", "different", "skip"]
message: str
@property
def is_equal(self) -> bool:
return self.status == "equal"
@property
def is_different(self) -> bool:
return self.status == "different"
@property
def is_skip(self) -> bool:
return self.status == "skip"
@classmethod
def Equal(cls) -> ComparisonResult:
"""the constraints are equal."""
return cls("equal", "The two constraints are equal")
@classmethod
def Different(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult:
"""the constraints are different for the provided reason(s)."""
return cls("different", ", ".join(util.to_list(reason)))
@classmethod
def Skip(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult:
"""the constraint cannot be compared for the provided reason(s).
The message is logged, but the constraints will be otherwise
considered equal, meaning that no migration command will be
generated.
"""
return cls("skip", ", ".join(util.to_list(reason)))
class _constraint_sig(Generic[_C]):
const: _C
_sig: Tuple[Any, ...]
name: Optional[sqla_compat._ConstraintNameDefined]
impl: DefaultImpl
_is_index: ClassVar[bool] = False
_is_fk: ClassVar[bool] = False
_is_uq: ClassVar[bool] = False
_is_metadata: bool
def __init_subclass__(cls) -> None:
cls._register()
@classmethod
def _register(cls):
raise NotImplementedError()
def __init__(
self, is_metadata: bool, impl: DefaultImpl, const: _C
) -> None:
raise NotImplementedError()
def compare_to_reflected(
self, other: _constraint_sig[Any]
) -> ComparisonResult:
assert self.impl is other.impl
assert self._is_metadata
assert not other._is_metadata
return self._compare_to_reflected(other)
def _compare_to_reflected(
self, other: _constraint_sig[_C]
) -> ComparisonResult:
raise NotImplementedError()
@classmethod
def from_constraint(
cls, is_metadata: bool, impl: DefaultImpl, constraint: _C
) -> _constraint_sig[_C]:
# these could be cached by constraint/impl, however, if the
# constraint is modified in place, then the sig is wrong. the mysql
# impl currently does this, and if we fixed that we can't be sure
# someone else might do it too, so play it safe.
sig = _clsreg[constraint.__visit_name__](is_metadata, impl, constraint)
return sig
def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]:
return sqla_compat._get_constraint_final_name(
self.const, context.dialect
)
@util.memoized_property
def is_named(self):
return sqla_compat._constraint_is_named(self.const, self.impl.dialect)
@util.memoized_property
def unnamed(self) -> Tuple[Any, ...]:
return self._sig
@util.memoized_property
def unnamed_no_options(self) -> Tuple[Any, ...]:
raise NotImplementedError()
@util.memoized_property
def _full_sig(self) -> Tuple[Any, ...]:
return (self.name,) + self.unnamed
def __eq__(self, other) -> bool:
return self._full_sig == other._full_sig
def __ne__(self, other) -> bool:
return self._full_sig != other._full_sig
def __hash__(self) -> int:
return hash(self._full_sig)
class _uq_constraint_sig(_constraint_sig[UniqueConstraint]):
_is_uq = True
@classmethod
def _register(cls) -> None:
_clsreg["unique_constraint"] = cls
is_unique = True
def __init__(
self,
is_metadata: bool,
impl: DefaultImpl,
const: UniqueConstraint,
) -> None:
self.impl = impl
self.const = const
self.name = sqla_compat.constraint_name_or_none(const.name)
self._sig = tuple(sorted([col.name for col in const.columns]))
self._is_metadata = is_metadata
@property
def column_names(self) -> Tuple[str, ...]:
return tuple([col.name for col in self.const.columns])
def _compare_to_reflected(
self, other: _constraint_sig[_C]
) -> ComparisonResult:
assert self._is_metadata
metadata_obj = self
conn_obj = other
assert is_uq_sig(conn_obj)
return self.impl.compare_unique_constraint(
metadata_obj.const, conn_obj.const
)
class _ix_constraint_sig(_constraint_sig[Index]):
_is_index = True
name: sqla_compat._ConstraintName
@classmethod
def _register(cls) -> None:
_clsreg["index"] = cls
def __init__(
self, is_metadata: bool, impl: DefaultImpl, const: Index
) -> None:
self.impl = impl
self.const = const
self.name = const.name
self.is_unique = bool(const.unique)
self._is_metadata = is_metadata
def _compare_to_reflected(
self, other: _constraint_sig[_C]
) -> ComparisonResult:
assert self._is_metadata
metadata_obj = self
conn_obj = other
assert is_index_sig(conn_obj)
return self.impl.compare_indexes(metadata_obj.const, conn_obj.const)
@util.memoized_property
def has_expressions(self):
return sqla_compat.is_expression_index(self.const)
@util.memoized_property
def column_names(self) -> Tuple[str, ...]:
return tuple([col.name for col in self.const.columns])
@util.memoized_property
def column_names_optional(self) -> Tuple[Optional[str], ...]:
return tuple(
[getattr(col, "name", None) for col in self.const.expressions]
)
@util.memoized_property
def is_named(self):
return True
@util.memoized_property
def unnamed(self):
return (self.is_unique,) + self.column_names_optional
class _fk_constraint_sig(_constraint_sig[ForeignKeyConstraint]):
_is_fk = True
@classmethod
def _register(cls) -> None:
_clsreg["foreign_key_constraint"] = cls
def __init__(
self,
is_metadata: bool,
impl: DefaultImpl,
const: ForeignKeyConstraint,
) -> None:
self._is_metadata = is_metadata
self.impl = impl
self.const = const
self.name = sqla_compat.constraint_name_or_none(const.name)
(
self.source_schema,
self.source_table,
self.source_columns,
self.target_schema,
self.target_table,
self.target_columns,
onupdate,
ondelete,
deferrable,
initially,
) = sqla_compat._fk_spec(const)
self._sig: Tuple[Any, ...] = (
self.source_schema,
self.source_table,
tuple(self.source_columns),
self.target_schema,
self.target_table,
tuple(self.target_columns),
) + (
(
(None if onupdate.lower() == "no action" else onupdate.lower())
if onupdate
else None
),
(
(None if ondelete.lower() == "no action" else ondelete.lower())
if ondelete
else None
),
# convert initially + deferrable into one three-state value
(
"initially_deferrable"
if initially and initially.lower() == "deferred"
else "deferrable" if deferrable else "not deferrable"
),
)
@util.memoized_property
def unnamed_no_options(self):
return (
self.source_schema,
self.source_table,
tuple(self.source_columns),
self.target_schema,
self.target_table,
tuple(self.target_columns),
)
def is_index_sig(sig: _constraint_sig) -> TypeGuard[_ix_constraint_sig]:
return sig._is_index
def is_uq_sig(sig: _constraint_sig) -> TypeGuard[_uq_constraint_sig]:
return sig._is_uq
def is_fk_sig(sig: _constraint_sig) -> TypeGuard[_fk_constraint_sig]:
return sig._is_fk
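This deleted module's `ComparisonResult` was the return protocol for the newer dialect compare hooks; a sketch of a hypothetical hook using it (the skip condition and helper flag are illustrative, not real Alembic code):

    from alembic.ddl._autogen import ComparisonResult

    def compare_indexes(self, metadata_index, reflected_index):
        # hypothetical dialect hook body showing the three-way protocol
        if bool(metadata_index.unique) != bool(reflected_index.unique):
            return ComparisonResult.Different("unique flag differs")
        if getattr(reflected_index, "_opaque", False):  # hypothetical flag
            # logged but otherwise treated as equal; no migration emitted
            return ComparisonResult.Skip("index cannot be introspected")
        return ComparisonResult.Equal()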

View File

@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
import functools
@@ -25,8 +22,6 @@ from ..util.sqla_compat import _table_for_constraint # noqa
if TYPE_CHECKING:
from typing import Any
from sqlalchemy import Computed
from sqlalchemy import Identity
from sqlalchemy.sql.compiler import Compiled
from sqlalchemy.sql.compiler import DDLCompiler
from sqlalchemy.sql.elements import TextClause
@@ -35,11 +30,14 @@ if TYPE_CHECKING:
from sqlalchemy.sql.type_api import TypeEngine
from .impl import DefaultImpl
from ..util.sqla_compat import Computed
from ..util.sqla_compat import Identity
_ServerDefault = Union["TextClause", "FetchedValue", "Function[Any]", str]
class AlterTable(DDLElement):
"""Represent an ALTER TABLE statement.
Only the string name and optional schema name of the table
@@ -154,24 +152,17 @@ class AddColumn(AlterTable):
name: str,
column: Column[Any],
schema: Optional[Union[quoted_name, str]] = None,
if_not_exists: Optional[bool] = None,
) -> None:
super().__init__(name, schema=schema)
self.column = column
self.if_not_exists = if_not_exists
class DropColumn(AlterTable):
def __init__(
self,
name: str,
column: Column[Any],
schema: Optional[str] = None,
if_exists: Optional[bool] = None,
self, name: str, column: Column[Any], schema: Optional[str] = None
) -> None:
super().__init__(name, schema=schema)
self.column = column
self.if_exists = if_exists
class ColumnComment(AlterColumn):
@@ -196,9 +187,7 @@ def visit_rename_table(
def visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) -> str:
return "%s %s" % (
alter_table(compiler, element.table_name, element.schema),
add_column(
compiler, element.column, if_not_exists=element.if_not_exists, **kw
),
add_column(compiler, element.column, **kw),
)
@@ -206,9 +195,7 @@ def visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) -> str:
def visit_drop_column(element: DropColumn, compiler: DDLCompiler, **kw) -> str:
return "%s %s" % (
alter_table(compiler, element.table_name, element.schema),
drop_column(
compiler, element.column.name, if_exists=element.if_exists, **kw
),
drop_column(compiler, element.column.name, **kw),
)
@@ -248,11 +235,9 @@ def visit_column_default(
return "%s %s %s" % (
alter_table(compiler, element.table_name, element.schema),
alter_column(compiler, element.column_name),
(
"SET DEFAULT %s" % format_server_default(compiler, element.default)
if element.default is not None
else "DROP DEFAULT"
),
"SET DEFAULT %s" % format_server_default(compiler, element.default)
if element.default is not None
else "DROP DEFAULT",
)
@@ -310,13 +295,9 @@ def format_server_default(
compiler: DDLCompiler,
default: Optional[_ServerDefault],
) -> str:
# this can be updated to use compiler.render_default_string
# for SQLAlchemy 2.0 and above; not in 1.4
default_str = compiler.get_column_default_string(
return compiler.get_column_default_string(
Column("x", Integer, server_default=default)
)
assert default_str is not None
return default_str
def format_type(compiler: DDLCompiler, type_: TypeEngine) -> str:
@@ -331,29 +312,16 @@ def alter_table(
return "ALTER TABLE %s" % format_table_name(compiler, name, schema)
def drop_column(
compiler: DDLCompiler, name: str, if_exists: Optional[bool] = None, **kw
) -> str:
return "DROP COLUMN %s%s" % (
"IF EXISTS " if if_exists else "",
format_column_name(compiler, name),
)
def drop_column(compiler: DDLCompiler, name: str, **kw) -> str:
return "DROP COLUMN %s" % format_column_name(compiler, name)
def alter_column(compiler: DDLCompiler, name: str) -> str:
return "ALTER COLUMN %s" % format_column_name(compiler, name)
def add_column(
compiler: DDLCompiler,
column: Column[Any],
if_not_exists: Optional[bool] = None,
**kw,
) -> str:
text = "ADD COLUMN %s%s" % (
"IF NOT EXISTS " if if_not_exists else "",
compiler.get_column_specification(column, **kw),
)
def add_column(compiler: DDLCompiler, column: Column[Any], **kw) -> str:
text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw)
const = " ".join(
compiler.process(constraint) for constraint in column.constraints


View File

@@ -1,9 +1,6 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
import logging
from collections import namedtuple
import re
from typing import Any
from typing import Callable
@@ -11,7 +8,6 @@ from typing import Dict
from typing import Iterable
from typing import List
from typing import Mapping
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Set
@@ -21,18 +17,10 @@ from typing import TYPE_CHECKING
from typing import Union
from sqlalchemy import cast
from sqlalchemy import Column
from sqlalchemy import MetaData
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import schema
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import text
from . import _autogen
from . import base
from ._autogen import _constraint_sig as _constraint_sig
from ._autogen import ComparisonResult as ComparisonResult
from .. import util
from ..util import sqla_compat
@@ -46,10 +34,13 @@ if TYPE_CHECKING:
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.sql import ClauseElement
from sqlalchemy.sql import Executable
from sqlalchemy.sql.elements import ColumnElement
from sqlalchemy.sql.elements import quoted_name
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.schema import Constraint
from sqlalchemy.sql.schema import ForeignKeyConstraint
from sqlalchemy.sql.schema import Index
from sqlalchemy.sql.schema import Table
from sqlalchemy.sql.schema import UniqueConstraint
from sqlalchemy.sql.selectable import TableClause
from sqlalchemy.sql.type_api import TypeEngine
@@ -59,8 +50,6 @@ if TYPE_CHECKING:
from ..operations.batch import ApplyBatchImpl
from ..operations.batch import BatchOperationsImpl
log = logging.getLogger(__name__)
class ImplMeta(type):
def __init__(
@@ -77,8 +66,11 @@ class ImplMeta(type):
_impls: Dict[str, Type[DefaultImpl]] = {}
Params = namedtuple("Params", ["token0", "tokens", "args", "kwargs"])
class DefaultImpl(metaclass=ImplMeta):
"""Provide the entrypoint for major migration operations,
including database-specific behavioral variances.
@@ -138,40 +130,6 @@ class DefaultImpl(metaclass=ImplMeta):
self.output_buffer.write(text + "\n\n")
self.output_buffer.flush()
def version_table_impl(
self,
*,
version_table: str,
version_table_schema: Optional[str],
version_table_pk: bool,
**kw: Any,
) -> Table:
"""Generate a :class:`.Table` object which will be used as the
structure for the Alembic version table.
Third party dialects may override this hook to provide an alternate
structure for this :class:`.Table`; requirements are only that it
be named based on the ``version_table`` parameter and contains
at least a single string-holding column named ``version_num``.
.. versionadded:: 1.14
"""
vt = Table(
version_table,
MetaData(),
Column("version_num", String(32), nullable=False),
schema=version_table_schema,
)
if version_table_pk:
vt.append_constraint(
PrimaryKeyConstraint(
"version_num", name=f"{version_table}_pkc"
)
)
return vt
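
The version_table_impl hook removed above (added in Alembic 1.14) is the override point for third-party dialects that need a different version table. A minimal sketch of such an override, assuming a hypothetical dialect name "mydialect" and a wider version_num column; per the docstring, only the table name and a string-holding version_num column are required:

from alembic.ddl.impl import DefaultImpl
from sqlalchemy import Column, MetaData, PrimaryKeyConstraint, String, Table

class MyDialectImpl(DefaultImpl):
    __dialect__ = "mydialect"  # hypothetical third-party dialect name

    def version_table_impl(self, *, version_table, version_table_schema,
                           version_table_pk, **kw):
        # widen version_num beyond the default String(32)
        vt = Table(
            version_table,
            MetaData(),
            Column("version_num", String(64), nullable=False),
            schema=version_table_schema,
        )
        if version_table_pk:
            vt.append_constraint(
                PrimaryKeyConstraint("version_num", name=f"{version_table}_pkc")
            )
        return vt
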
def requires_recreate_in_batch(
self, batch_op: BatchOperationsImpl
) -> bool:
@@ -203,15 +161,16 @@ class DefaultImpl(metaclass=ImplMeta):
def _exec(
self,
construct: Union[Executable, str],
execution_options: Optional[Mapping[str, Any]] = None,
multiparams: Optional[Sequence[Mapping[str, Any]]] = None,
params: Mapping[str, Any] = util.immutabledict(),
execution_options: Optional[dict[str, Any]] = None,
multiparams: Sequence[dict] = (),
params: Dict[str, Any] = util.immutabledict(),
) -> Optional[CursorResult]:
if isinstance(construct, str):
construct = text(construct)
if self.as_sql:
if multiparams is not None or params:
raise TypeError("SQL parameters not allowed with as_sql")
if multiparams or params:
# TODO: coverage
raise Exception("Execution arguments not allowed with as_sql")
compile_kw: dict[str, Any]
if self.literal_binds and not isinstance(
@@ -234,16 +193,11 @@ class DefaultImpl(metaclass=ImplMeta):
assert conn is not None
if execution_options:
conn = conn.execution_options(**execution_options)
if params:
assert isinstance(multiparams, tuple)
multiparams += (params,)
if params and multiparams is not None:
raise TypeError(
"Can't send params and multiparams at the same time"
)
if multiparams:
return conn.execute(construct, multiparams)
else:
return conn.execute(construct, params)
return conn.execute(construct, multiparams)
def execute(
self,
@@ -256,11 +210,8 @@ class DefaultImpl(metaclass=ImplMeta):
self,
table_name: str,
column_name: str,
*,
nullable: Optional[bool] = None,
server_default: Optional[
Union[_ServerDefault, Literal[False]]
] = False,
server_default: Union[_ServerDefault, Literal[False]] = False,
name: Optional[str] = None,
type_: Optional[TypeEngine] = None,
schema: Optional[str] = None,
@@ -371,40 +322,25 @@ class DefaultImpl(metaclass=ImplMeta):
self,
table_name: str,
column: Column[Any],
*,
schema: Optional[Union[str, quoted_name]] = None,
if_not_exists: Optional[bool] = None,
) -> None:
self._exec(
base.AddColumn(
table_name,
column,
schema=schema,
if_not_exists=if_not_exists,
)
)
self._exec(base.AddColumn(table_name, column, schema=schema))
def drop_column(
self,
table_name: str,
column: Column[Any],
*,
schema: Optional[str] = None,
if_exists: Optional[bool] = None,
**kw,
) -> None:
self._exec(
base.DropColumn(
table_name, column, schema=schema, if_exists=if_exists
)
)
self._exec(base.DropColumn(table_name, column, schema=schema))
def add_constraint(self, const: Any) -> None:
if const._create_rule is None or const._create_rule(self):
self._exec(schema.AddConstraint(const))
def drop_constraint(self, const: Constraint, **kw: Any) -> None:
self._exec(schema.DropConstraint(const, **kw))
def drop_constraint(self, const: Constraint) -> None:
self._exec(schema.DropConstraint(const))
def rename_table(
self,
@@ -416,11 +352,11 @@ class DefaultImpl(metaclass=ImplMeta):
base.RenameTable(old_table_name, new_table_name, schema=schema)
)
def create_table(self, table: Table, **kw: Any) -> None:
def create_table(self, table: Table) -> None:
table.dispatch.before_create(
table, self.connection, checkfirst=False, _ddl_runner=self
)
self._exec(schema.CreateTable(table, **kw))
self._exec(schema.CreateTable(table))
table.dispatch.after_create(
table, self.connection, checkfirst=False, _ddl_runner=self
)
@@ -439,11 +375,11 @@ class DefaultImpl(metaclass=ImplMeta):
if comment and with_comment:
self.create_column_comment(column)
def drop_table(self, table: Table, **kw: Any) -> None:
def drop_table(self, table: Table) -> None:
table.dispatch.before_drop(
table, self.connection, checkfirst=False, _ddl_runner=self
)
self._exec(schema.DropTable(table, **kw))
self._exec(schema.DropTable(table))
table.dispatch.after_drop(
table, self.connection, checkfirst=False, _ddl_runner=self
)
@@ -457,7 +393,7 @@ class DefaultImpl(metaclass=ImplMeta):
def drop_table_comment(self, table: Table) -> None:
self._exec(schema.DropTableComment(table))
def create_column_comment(self, column: Column[Any]) -> None:
def create_column_comment(self, column: ColumnElement[Any]) -> None:
self._exec(schema.SetColumnComment(column))
def drop_index(self, index: Index, **kw: Any) -> None:
@@ -476,19 +412,15 @@ class DefaultImpl(metaclass=ImplMeta):
if self.as_sql:
for row in rows:
self._exec(
table.insert()
.inline()
.values(
sqla_compat._insert_inline(table).values(
**{
k: (
sqla_compat._literal_bindparam(
k, v, type_=table.c[k].type
)
if not isinstance(
v, sqla_compat._literal_bindparam
)
else v
k: sqla_compat._literal_bindparam(
k, v, type_=table.c[k].type
)
if not isinstance(
v, sqla_compat._literal_bindparam
)
else v
for k, v in row.items()
}
)
@@ -496,13 +428,16 @@ class DefaultImpl(metaclass=ImplMeta):
else:
if rows:
if multiinsert:
self._exec(table.insert().inline(), multiparams=rows)
self._exec(
sqla_compat._insert_inline(table), multiparams=rows
)
else:
for row in rows:
self._exec(table.insert().inline().values(**row))
self._exec(
sqla_compat._insert_inline(table).values(**row)
)
def _tokenize_column_type(self, column: Column) -> Params:
definition: str
definition = self.dialect.type_compiler.process(column.type).lower()
# tokenize the SQLAlchemy-generated version of a type, so that
@@ -517,9 +452,9 @@ class DefaultImpl(metaclass=ImplMeta):
# varchar character set utf8
#
tokens: List[str] = re.findall(r"[\w\-_]+|\(.+?\)", definition)
tokens = re.findall(r"[\w\-_]+|\(.+?\)", definition)
term_tokens: List[str] = []
term_tokens = []
paren_term = None
for token in tokens:
@@ -531,7 +466,6 @@ class DefaultImpl(metaclass=ImplMeta):
params = Params(term_tokens[0], term_tokens[1:], [], {})
if paren_term:
term: str
for term in re.findall("[^(),]+", paren_term):
if "=" in term:
key, val = term.split("=")
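
The tokenizer above splits a compiled type string into a leading token, trailing tokens, positional args, and keyword args. A standalone sketch under those assumptions, using the same regular expressions, with Params mirroring the namedtuple defined earlier:

import re
from collections import namedtuple

Params = namedtuple("Params", ["token0", "tokens", "args", "kwargs"])

def tokenize(definition: str) -> Params:
    # same pattern as above: bare words, or a parenthesized group
    tokens = re.findall(r"[\w\-_]+|\(.+?\)", definition.lower())
    term_tokens = []
    paren_term = None
    for token in tokens:
        if token.startswith("("):
            paren_term = token
        else:
            term_tokens.append(token)
    params = Params(term_tokens[0], term_tokens[1:], [], {})
    if paren_term:
        for term in re.findall("[^(),]+", paren_term):
            if "=" in term:
                key, val = term.split("=")
                params.kwargs[key.strip()] = val.strip()
            else:
                params.args.append(term.strip())
    return params

# tokenize("VARCHAR(50) CHARACTER SET utf8") ->
# Params(token0='varchar', tokens=['character', 'set', 'utf8'],
#        args=['50'], kwargs={})
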
@@ -708,7 +642,7 @@ class DefaultImpl(metaclass=ImplMeta):
diff, ignored = _compare_identity_options(
metadata_identity,
inspector_identity,
schema.Identity(),
sqla_compat.Identity(),
skip={"always"},
)
@@ -730,96 +664,15 @@ class DefaultImpl(metaclass=ImplMeta):
bool(diff) or bool(metadata_identity) != bool(inspector_identity),
)
def _compare_index_unique(
self, metadata_index: Index, reflected_index: Index
) -> Optional[str]:
conn_unique = bool(reflected_index.unique)
meta_unique = bool(metadata_index.unique)
if conn_unique != meta_unique:
return f"unique={conn_unique} to unique={meta_unique}"
else:
return None
def create_index_sig(self, index: Index) -> Tuple[Any, ...]:
# order of cols matters in an index
return tuple(col.name for col in index.columns)
def _create_metadata_constraint_sig(
self, constraint: _autogen._C, **opts: Any
) -> _constraint_sig[_autogen._C]:
return _constraint_sig.from_constraint(True, self, constraint, **opts)
def _create_reflected_constraint_sig(
self, constraint: _autogen._C, **opts: Any
) -> _constraint_sig[_autogen._C]:
return _constraint_sig.from_constraint(False, self, constraint, **opts)
def compare_indexes(
self,
metadata_index: Index,
reflected_index: Index,
) -> ComparisonResult:
"""Compare two indexes by comparing the signature generated by
``create_index_sig``.
This method returns a ``ComparisonResult``.
"""
msg: List[str] = []
unique_msg = self._compare_index_unique(
metadata_index, reflected_index
)
if unique_msg:
msg.append(unique_msg)
m_sig = self._create_metadata_constraint_sig(metadata_index)
r_sig = self._create_reflected_constraint_sig(reflected_index)
assert _autogen.is_index_sig(m_sig)
assert _autogen.is_index_sig(r_sig)
# The assumption is that the indexes have no expressions
for sig in m_sig, r_sig:
if sig.has_expressions:
log.warning(
"Generating approximate signature for index %s. "
"The dialect "
"implementation should either skip expression indexes "
"or provide a custom implementation.",
sig.const,
)
if m_sig.column_names != r_sig.column_names:
msg.append(
f"expression {r_sig.column_names} to {m_sig.column_names}"
)
if msg:
return ComparisonResult.Different(msg)
else:
return ComparisonResult.Equal()
def compare_unique_constraint(
self,
metadata_constraint: UniqueConstraint,
reflected_constraint: UniqueConstraint,
) -> ComparisonResult:
"""Compare two unique constraints by comparing the two signatures.
The arguments are two tuples that contain the unique constraint and
the signatures generated by ``create_unique_constraint_sig``.
This method returns a ``ComparisonResult``.
"""
metadata_tup = self._create_metadata_constraint_sig(
metadata_constraint
)
reflected_tup = self._create_reflected_constraint_sig(
reflected_constraint
)
meta_sig = metadata_tup.unnamed
conn_sig = reflected_tup.unnamed
if conn_sig != meta_sig:
return ComparisonResult.Different(
f"expression {conn_sig} to {meta_sig}"
)
else:
return ComparisonResult.Equal()
def create_unique_constraint_sig(
self, const: UniqueConstraint
) -> Tuple[Any, ...]:
# order of cols does not matter in a unique constraint
return tuple(sorted([col.name for col in const.columns]))
def _skip_functional_indexes(self, metadata_indexes, conn_indexes):
conn_indexes_by_name = {c.name: c for c in conn_indexes}
@@ -844,13 +697,6 @@ class DefaultImpl(metaclass=ImplMeta):
return reflected_object.get("dialect_options", {})
class Params(NamedTuple):
token0: str
tokens: List[str]
args: List[str]
kwargs: Dict[str, str]
def _compare_identity_options(
metadata_io: Union[schema.Identity, schema.Sequence, None],
inspector_io: Union[schema.Identity, schema.Sequence, None],
@@ -889,13 +735,12 @@ def _compare_identity_options(
set(meta_d).union(insp_d),
)
if sqla_compat.identity_has_dialect_kwargs:
assert hasattr(default_io, "dialect_kwargs")
# use only the dialect kwargs in inspector_io since metadata_io
# can have options for many backends
check_dicts(
getattr(metadata_io, "dialect_kwargs", {}),
getattr(inspector_io, "dialect_kwargs", {}),
default_io.dialect_kwargs,
default_io.dialect_kwargs, # type: ignore[union-attr]
getattr(inspector_io, "dialect_kwargs", {}),
)


@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
import re
@@ -12,6 +9,7 @@ from typing import TYPE_CHECKING
from typing import Union
from sqlalchemy import types as sqltypes
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import Column
from sqlalchemy.schema import CreateIndex
from sqlalchemy.sql.base import Executable
@@ -32,7 +30,6 @@ from .base import RenameTable
from .impl import DefaultImpl
from .. import util
from ..util import sqla_compat
from ..util.sqla_compat import compiles
if TYPE_CHECKING:
from typing import Literal
@@ -83,11 +80,10 @@ class MSSQLImpl(DefaultImpl):
if self.as_sql and self.batch_separator:
self.static_output(self.batch_separator)
def alter_column(
def alter_column( # type:ignore[override]
self,
table_name: str,
column_name: str,
*,
nullable: Optional[bool] = None,
server_default: Optional[
Union[_ServerDefault, Literal[False]]
@@ -203,7 +199,6 @@ class MSSQLImpl(DefaultImpl):
self,
table_name: str,
column: Column[Any],
*,
schema: Optional[str] = None,
**kw,
) -> None:


@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
import re
@@ -11,9 +8,7 @@ from typing import Union
from sqlalchemy import schema
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import elements
from sqlalchemy.sql import functions
from sqlalchemy.sql import operators
from sqlalchemy.ext.compiler import compiles
from .base import alter_table
from .base import AlterColumn
@@ -25,16 +20,16 @@ from .base import format_column_name
from .base import format_server_default
from .impl import DefaultImpl
from .. import util
from ..autogenerate import compare
from ..util import sqla_compat
from ..util.sqla_compat import _is_mariadb
from ..util.sqla_compat import _is_type_bound
from ..util.sqla_compat import compiles
if TYPE_CHECKING:
from typing import Literal
from sqlalchemy.dialects.mysql.base import MySQLDDLCompiler
from sqlalchemy.sql.ddl import DropConstraint
from sqlalchemy.sql.elements import ClauseElement
from sqlalchemy.sql.schema import Constraint
from sqlalchemy.sql.type_api import TypeEngine
@@ -51,40 +46,12 @@ class MySQLImpl(DefaultImpl):
)
type_arg_extract = [r"character set ([\w\-_]+)", r"collate ([\w\-_]+)"]
def render_ddl_sql_expr(
self,
expr: ClauseElement,
is_server_default: bool = False,
is_index: bool = False,
**kw: Any,
) -> str:
# apply Grouping to index expressions;
# see https://github.com/sqlalchemy/sqlalchemy/blob/
# 36da2eaf3e23269f2cf28420ae73674beafd0661/
# lib/sqlalchemy/dialects/mysql/base.py#L2191
if is_index and (
isinstance(expr, elements.BinaryExpression)
or (
isinstance(expr, elements.UnaryExpression)
and expr.modifier not in (operators.desc_op, operators.asc_op)
)
or isinstance(expr, functions.FunctionElement)
):
expr = elements.Grouping(expr)
return super().render_ddl_sql_expr(
expr, is_server_default=is_server_default, is_index=is_index, **kw
)
def alter_column(
def alter_column( # type:ignore[override]
self,
table_name: str,
column_name: str,
*,
nullable: Optional[bool] = None,
server_default: Optional[
Union[_ServerDefault, Literal[False]]
] = False,
server_default: Union[_ServerDefault, Literal[False]] = False,
name: Optional[str] = None,
type_: Optional[TypeEngine] = None,
schema: Optional[str] = None,
@@ -125,29 +92,21 @@ class MySQLImpl(DefaultImpl):
column_name,
schema=schema,
newname=name if name is not None else column_name,
nullable=(
nullable
if nullable is not None
else (
existing_nullable
if existing_nullable is not None
else True
)
),
nullable=nullable
if nullable is not None
else existing_nullable
if existing_nullable is not None
else True,
type_=type_ if type_ is not None else existing_type,
default=(
server_default
if server_default is not False
else existing_server_default
),
autoincrement=(
autoincrement
if autoincrement is not None
else existing_autoincrement
),
comment=(
comment if comment is not False else existing_comment
),
default=server_default
if server_default is not False
else existing_server_default,
autoincrement=autoincrement
if autoincrement is not None
else existing_autoincrement,
comment=comment
if comment is not False
else existing_comment,
)
)
elif (
@@ -162,29 +121,21 @@ class MySQLImpl(DefaultImpl):
column_name,
schema=schema,
newname=name if name is not None else column_name,
nullable=(
nullable
if nullable is not None
else (
existing_nullable
if existing_nullable is not None
else True
)
),
nullable=nullable
if nullable is not None
else existing_nullable
if existing_nullable is not None
else True,
type_=type_ if type_ is not None else existing_type,
default=(
server_default
if server_default is not False
else existing_server_default
),
autoincrement=(
autoincrement
if autoincrement is not None
else existing_autoincrement
),
comment=(
comment if comment is not False else existing_comment
),
default=server_default
if server_default is not False
else existing_server_default,
autoincrement=autoincrement
if autoincrement is not None
else existing_autoincrement,
comment=comment
if comment is not False
else existing_comment,
)
)
elif server_default is not False:
@@ -197,7 +148,6 @@ class MySQLImpl(DefaultImpl):
def drop_constraint(
self,
const: Constraint,
**kw: Any,
) -> None:
if isinstance(const, schema.CheckConstraint) and _is_type_bound(const):
return
@@ -207,11 +157,12 @@ class MySQLImpl(DefaultImpl):
def _is_mysql_allowed_functional_default(
self,
type_: Optional[TypeEngine],
server_default: Optional[Union[_ServerDefault, Literal[False]]],
server_default: Union[_ServerDefault, Literal[False]],
) -> bool:
return (
type_ is not None
and type_._type_affinity is sqltypes.DateTime
and type_._type_affinity # type:ignore[attr-defined]
is sqltypes.DateTime
and server_default is not None
)
@@ -321,12 +272,10 @@ class MySQLImpl(DefaultImpl):
def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks):
conn_fk_by_sig = {
self._create_reflected_constraint_sig(fk).unnamed_no_options: fk
for fk in conn_fks
compare._fk_constraint_sig(fk).sig: fk for fk in conn_fks
}
metadata_fk_by_sig = {
self._create_metadata_constraint_sig(fk).unnamed_no_options: fk
for fk in metadata_fks
compare._fk_constraint_sig(fk).sig: fk for fk in metadata_fks
}
for sig in set(conn_fk_by_sig).intersection(metadata_fk_by_sig):
@@ -358,7 +307,7 @@ class MySQLAlterDefault(AlterColumn):
self,
name: str,
column_name: str,
default: Optional[_ServerDefault],
default: _ServerDefault,
schema: Optional[str] = None,
) -> None:
super(AlterColumn, self).__init__(name, schema=schema)
@@ -416,11 +365,9 @@ def _mysql_alter_default(
return "%s ALTER COLUMN %s %s" % (
alter_table(compiler, element.table_name, element.schema),
format_column_name(compiler, element.column_name),
(
"SET DEFAULT %s" % format_server_default(compiler, element.default)
if element.default is not None
else "DROP DEFAULT"
),
"SET DEFAULT %s" % format_server_default(compiler, element.default)
if element.default is not None
else "DROP DEFAULT",
)
@@ -507,7 +454,7 @@ def _mysql_drop_constraint(
# note that SQLAlchemy as of 1.2 does not yet support
# DROP CONSTRAINT for MySQL/MariaDB, so we implement fully
# here.
if compiler.dialect.is_mariadb:
if _is_mariadb(compiler.dialect):
return "ALTER TABLE %s DROP CONSTRAINT %s" % (
compiler.preparer.format_table(constraint.table),
compiler.preparer.format_constraint(constraint),


@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
import re
@@ -8,6 +5,7 @@ from typing import Any
from typing import Optional
from typing import TYPE_CHECKING
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import sqltypes
from .base import AddColumn
@@ -24,7 +22,6 @@ from .base import format_type
from .base import IdentityColumnDefault
from .base import RenameTable
from .impl import DefaultImpl
from ..util.sqla_compat import compiles
if TYPE_CHECKING:
from sqlalchemy.dialects.oracle.base import OracleDDLCompiler
@@ -141,11 +138,9 @@ def visit_column_default(
return "%s %s %s" % (
alter_table(compiler, element.table_name, element.schema),
alter_column(compiler, element.column_name),
(
"DEFAULT %s" % format_server_default(compiler, element.default)
if element.default is not None
else "DEFAULT NULL"
),
"DEFAULT %s" % format_server_default(compiler, element.default)
if element.default is not None
else "DEFAULT NULL",
)


@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
import logging
@@ -16,19 +13,18 @@ from typing import TYPE_CHECKING
from typing import Union
from sqlalchemy import Column
from sqlalchemy import Float
from sqlalchemy import Identity
from sqlalchemy import literal_column
from sqlalchemy import Numeric
from sqlalchemy import select
from sqlalchemy import text
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql import BIGINT
from sqlalchemy.dialects.postgresql import ExcludeConstraint
from sqlalchemy.dialects.postgresql import INTEGER
from sqlalchemy.schema import CreateIndex
from sqlalchemy.sql import operators
from sqlalchemy.sql.elements import ColumnClause
from sqlalchemy.sql.elements import TextClause
from sqlalchemy.sql.elements import UnaryExpression
from sqlalchemy.sql.functions import FunctionElement
from sqlalchemy.types import NULLTYPE
@@ -36,12 +32,12 @@ from .base import alter_column
from .base import alter_table
from .base import AlterColumn
from .base import ColumnComment
from .base import compiles
from .base import format_column_name
from .base import format_table_name
from .base import format_type
from .base import IdentityColumnDefault
from .base import RenameTable
from .impl import ComparisonResult
from .impl import DefaultImpl
from .. import util
from ..autogenerate import render
@@ -50,8 +46,6 @@ from ..operations import schemaobj
from ..operations.base import BatchOperations
from ..operations.base import Operations
from ..util import sqla_compat
from ..util.sqla_compat import compiles
if TYPE_CHECKING:
from typing import Literal
@@ -136,28 +130,25 @@ class PostgresqlImpl(DefaultImpl):
metadata_default = metadata_column.server_default.arg
if isinstance(metadata_default, str):
if not isinstance(inspector_column.type, (Numeric, Float)):
if not isinstance(inspector_column.type, Numeric):
metadata_default = re.sub(r"^'|'$", "", metadata_default)
metadata_default = f"'{metadata_default}'"
metadata_default = literal_column(metadata_default)
# run a real compare against the server
conn = self.connection
assert conn is not None
return not conn.scalar(
select(literal_column(conn_col_default) == metadata_default)
return not self.connection.scalar(
sqla_compat._select(
literal_column(conn_col_default) == metadata_default
)
)
def alter_column(
def alter_column( # type:ignore[override]
self,
table_name: str,
column_name: str,
*,
nullable: Optional[bool] = None,
server_default: Optional[
Union[_ServerDefault, Literal[False]]
] = False,
server_default: Union[_ServerDefault, Literal[False]] = False,
name: Optional[str] = None,
type_: Optional[TypeEngine] = None,
schema: Optional[str] = None,
@@ -223,8 +214,7 @@ class PostgresqlImpl(DefaultImpl):
"join pg_class t on t.oid=d.refobjid "
"join pg_attribute a on a.attrelid=t.oid and "
"a.attnum=d.refobjsubid "
"where c.relkind='S' and "
"c.oid=cast(:seqname as regclass)"
"where c.relkind='S' and c.relname=:seqname"
),
seqname=seq_match.group(1),
).first()
@@ -262,60 +252,62 @@ class PostgresqlImpl(DefaultImpl):
if not sqla_compat.sqla_2:
self._skip_functional_indexes(metadata_indexes, conn_indexes)
# pg behavior regarding modifiers
# | # | compiled sql | returned sql | regexp. group is removed |
# | - | ---------------- | -----------------| ------------------------ |
# | 1 | nulls first | nulls first | - |
# | 2 | nulls last | | (?<! desc)( nulls last)$ |
# | 3 | asc | | ( asc)$ |
# | 4 | asc nulls first | nulls first | ( asc) nulls first$ |
# | 5 | asc nulls last | | ( asc nulls last)$ |
# | 6 | desc | desc | - |
# | 7 | desc nulls first | desc | desc( nulls first)$ |
# | 8 | desc nulls last | desc nulls last | - |
_default_modifiers_re = ( # order of case 2 and 5 matters
re.compile("( asc nulls last)$"), # case 5
re.compile("(?<! desc)( nulls last)$"), # case 2
re.compile("( asc)$"), # case 3
re.compile("( asc) nulls first$"), # case 4
re.compile(" desc( nulls first)$"), # case 7
)
def _cleanup_index_expr(self, index: Index, expr: str) -> str:
def _cleanup_index_expr(
self, index: Index, expr: str, remove_suffix: str
) -> str:
# start = expr
expr = expr.lower().replace('"', "").replace("'", "")
if index.table is not None:
# should not be needed, since include_table=False is in compile
expr = expr.replace(f"{index.table.name.lower()}.", "")
while expr and expr[0] == "(" and expr[-1] == ")":
expr = expr[1:-1]
if "::" in expr:
# strip :: cast. types can have spaces in them
expr = re.sub(r"(::[\w ]+\w)", "", expr)
while expr and expr[0] == "(" and expr[-1] == ")":
expr = expr[1:-1]
if remove_suffix and expr.endswith(remove_suffix):
expr = expr[: -len(remove_suffix)]
# NOTE: when parsing the connection expression this cleanup could
# be skipped
for rs in self._default_modifiers_re:
if match := rs.search(expr):
start, end = match.span(1)
expr = expr[:start] + expr[end:]
break
while expr and expr[0] == "(" and expr[-1] == ")":
expr = expr[1:-1]
# strip casts
cast_re = re.compile(r"cast\s*\(")
if cast_re.match(expr):
expr = cast_re.sub("", expr)
# remove the as type
expr = re.sub(r"as\s+[^)]+\)", "", expr)
# remove spaces
expr = expr.replace(" ", "")
# print(f"START: {start} END: {expr}")
return expr
def _dialect_options(
def _default_modifiers(self, exp: ClauseElement) -> str:
to_remove = ""
while isinstance(exp, UnaryExpression):
if exp.modifier is None:
exp = exp.element
else:
op = exp.modifier
if isinstance(exp.element, UnaryExpression):
inner_op = exp.element.modifier
else:
inner_op = None
if inner_op is None:
if op == operators.asc_op:
# default is asc
to_remove = " asc"
elif op == operators.nullslast_op:
# default is nulls last
to_remove = " nulls last"
else:
if (
inner_op == operators.asc_op
and op == operators.nullslast_op
):
# default is asc nulls last
to_remove = " asc nulls last"
elif (
inner_op == operators.desc_op
and op == operators.nullsfirst_op
):
# default for desc is nulls first
to_remove = " nulls first"
break
return to_remove
def _dialect_sig(
self, item: Union[Index, UniqueConstraint]
) -> Tuple[Any, ...]:
# only the positive case is returned by sqlalchemy reflection so
@@ -324,93 +316,25 @@ class PostgresqlImpl(DefaultImpl):
return ("nulls_not_distinct",)
return ()
def compare_indexes(
self,
metadata_index: Index,
reflected_index: Index,
) -> ComparisonResult:
msg = []
unique_msg = self._compare_index_unique(
metadata_index, reflected_index
)
if unique_msg:
msg.append(unique_msg)
m_exprs = metadata_index.expressions
r_exprs = reflected_index.expressions
if len(m_exprs) != len(r_exprs):
msg.append(f"expression number {len(r_exprs)} to {len(m_exprs)}")
if msg:
# no point going further, return early
return ComparisonResult.Different(msg)
skip = []
for pos, (m_e, r_e) in enumerate(zip(m_exprs, r_exprs), 1):
m_compile = self._compile_element(m_e)
m_text = self._cleanup_index_expr(metadata_index, m_compile)
# print(f"META ORIG: {m_compile!r} CLEANUP: {m_text!r}")
r_compile = self._compile_element(r_e)
r_text = self._cleanup_index_expr(metadata_index, r_compile)
# print(f"CONN ORIG: {r_compile!r} CLEANUP: {r_text!r}")
if m_text == r_text:
continue # these expressions are equal
elif m_compile.strip().endswith("_ops") and (
" " in m_compile or ")" in m_compile # is an expression
):
skip.append(
f"expression #{pos} {m_compile!r} detected "
"as including operator clause."
)
util.warn(
f"Expression #{pos} {m_compile!r} in index "
f"{reflected_index.name!r} detected to include "
"an operator clause. Expression compare cannot proceed. "
"Please move the operator clause to the "
"``postgresql_ops`` dict to enable proper compare "
"of the index expressions: "
"https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#operator-classes", # noqa: E501
)
else:
msg.append(f"expression #{pos} {r_compile!r} to {m_compile!r}")
m_options = self._dialect_options(metadata_index)
r_options = self._dialect_options(reflected_index)
if m_options != r_options:
msg.extend(f"options {r_options} to {m_options}")
if msg:
return ComparisonResult.Different(msg)
elif skip:
# if there are other changes detected don't skip the index
return ComparisonResult.Skip(skip)
else:
return ComparisonResult.Equal()
def compare_unique_constraint(
self,
metadata_constraint: UniqueConstraint,
reflected_constraint: UniqueConstraint,
) -> ComparisonResult:
metadata_tup = self._create_metadata_constraint_sig(
metadata_constraint
)
reflected_tup = self._create_reflected_constraint_sig(
reflected_constraint
)
meta_sig = metadata_tup.unnamed
conn_sig = reflected_tup.unnamed
if conn_sig != meta_sig:
return ComparisonResult.Different(
f"expression {conn_sig} to {meta_sig}"
def create_index_sig(self, index: Index) -> Tuple[Any, ...]:
return tuple(
self._cleanup_index_expr(
index,
*(
(e, "")
if isinstance(e, str)
else (self._compile_element(e), self._default_modifiers(e))
),
)
for e in index.expressions
) + self._dialect_sig(index)
metadata_do = self._dialect_options(metadata_tup.const)
conn_do = self._dialect_options(reflected_tup.const)
if metadata_do != conn_do:
return ComparisonResult.Different(
f"expression {conn_do} to {metadata_do}"
)
return ComparisonResult.Equal()
def create_unique_constraint_sig(
self, const: UniqueConstraint
) -> Tuple[Any, ...]:
return tuple(
sorted([col.name for col in const.columns])
) + self._dialect_sig(const)
def adjust_reflected_dialect_options(
self, reflected_options: Dict[str, Any], kind: str
@@ -421,9 +345,7 @@ class PostgresqlImpl(DefaultImpl):
options.pop("postgresql_include", None)
return options
def _compile_element(self, element: Union[ClauseElement, str]) -> str:
if isinstance(element, str):
return element
def _compile_element(self, element: ClauseElement) -> str:
return element.compile(
dialect=self.dialect,
compile_kwargs={"literal_binds": True, "include_table": False},
@@ -590,7 +512,7 @@ def visit_identity_column(
)
else:
text += "SET %s " % compiler.get_identity_options(
Identity(**{attr: getattr(identity, attr)})
sqla_compat.Identity(**{attr: getattr(identity, attr)})
)
return text
@@ -634,8 +556,9 @@ class CreateExcludeConstraintOp(ops.AddConstraintOp):
return cls(
constraint.name,
constraint_table.name,
[ # type: ignore
(expr, op) for expr, name, op in constraint._render_exprs
[
(expr, op)
for expr, name, op in constraint._render_exprs # type:ignore[attr-defined] # noqa
],
where=cast("ColumnElement[bool] | None", constraint.where),
schema=constraint_table.schema,
@@ -662,7 +585,7 @@ class CreateExcludeConstraintOp(ops.AddConstraintOp):
expr,
name,
oper,
) in excl._render_exprs:
) in excl._render_exprs: # type:ignore[attr-defined]
t.append_column(Column(name, NULLTYPE))
t.append_constraint(excl)
return excl
@@ -720,7 +643,7 @@ class CreateExcludeConstraintOp(ops.AddConstraintOp):
constraint_name: str,
*elements: Any,
**kw: Any,
) -> Optional[Table]:
):
"""Issue a "create exclude constraint" instruction using the
current batch migration context.
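
For reference, a usage sketch of this operation in the style of the Alembic documentation's exclude-constraint example; the table and column names are illustrative:

from alembic import op

def upgrade():
    op.create_exclude_constraint(
        "user_excl",
        "user",
        ("period", "&&"),  # no two rows may have overlapping periods
        where=("period != 'empty'"),
        using="gist",
    )
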
@@ -792,13 +715,10 @@ def _exclude_constraint(
args = [
"(%s, %r)"
% (
_render_potential_column(
sqltext, # type:ignore[arg-type]
autogen_context,
),
_render_potential_column(sqltext, autogen_context),
opstring,
)
for sqltext, name, opstring in constraint._render_exprs
for sqltext, name, opstring in constraint._render_exprs # type:ignore[attr-defined] # noqa
]
if constraint.where is not None:
args.append(
@@ -850,5 +770,5 @@ def _render_potential_column(
return render._render_potential_expr(
value,
autogen_context,
wrap_in_element=isinstance(value, (TextClause, FunctionElement)),
wrap_in_text=isinstance(value, (TextClause, FunctionElement)),
)


@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
import re
@@ -11,19 +8,16 @@ from typing import TYPE_CHECKING
from typing import Union
from sqlalchemy import cast
from sqlalchemy import Computed
from sqlalchemy import JSON
from sqlalchemy import schema
from sqlalchemy import sql
from sqlalchemy.ext.compiler import compiles
from .base import alter_table
from .base import ColumnName
from .base import format_column_name
from .base import format_table_name
from .base import RenameTable
from .impl import DefaultImpl
from .. import util
from ..util.sqla_compat import compiles
if TYPE_CHECKING:
from sqlalchemy.engine.reflection import Inspector
@@ -65,7 +59,7 @@ class SQLiteImpl(DefaultImpl):
) and isinstance(col.server_default.arg, sql.ClauseElement):
return True
elif (
isinstance(col.server_default, Computed)
isinstance(col.server_default, util.sqla_compat.Computed)
and col.server_default.persisted
):
return True
@@ -77,13 +71,13 @@ class SQLiteImpl(DefaultImpl):
def add_constraint(self, const: Constraint):
# attempt to distinguish between an
# auto-gen constraint and an explicit one
if const._create_rule is None:
if const._create_rule is None: # type:ignore[attr-defined]
raise NotImplementedError(
"No support for ALTER of constraints in SQLite dialect. "
"Please refer to the batch mode feature which allows for "
"SQLite migrations using a copy-and-move strategy."
)
elif const._create_rule(self):
elif const._create_rule(self): # type:ignore[attr-defined]
util.warn(
"Skipping unsupported ALTER for "
"creation of implicit constraint. "
@@ -91,8 +85,8 @@ class SQLiteImpl(DefaultImpl):
"SQLite migrations using a copy-and-move strategy."
)
def drop_constraint(self, const: Constraint, **kw: Any):
if const._create_rule is None:
def drop_constraint(self, const: Constraint):
if const._create_rule is None: # type:ignore[attr-defined]
raise NotImplementedError(
"No support for ALTER of constraints in SQLite dialect. "
"Please refer to the batch mode feature which allows for "
@@ -183,7 +177,8 @@ class SQLiteImpl(DefaultImpl):
new_type: TypeEngine,
) -> None:
if (
existing.type._type_affinity is not new_type._type_affinity
existing.type._type_affinity # type:ignore[attr-defined]
is not new_type._type_affinity # type:ignore[attr-defined]
and not isinstance(new_type, JSON)
):
existing_transfer["expr"] = cast(
@@ -210,15 +205,6 @@ def visit_rename_table(
)
@compiles(ColumnName, "sqlite")
def visit_column_name(element: ColumnName, compiler: DDLCompiler, **kw) -> str:
return "%s RENAME COLUMN %s TO %s" % (
alter_table(compiler, element.table_name, element.schema),
format_column_name(compiler, element.column_name),
format_column_name(compiler, element.newname),
)
# @compiles(AddColumn, 'sqlite')
# def visit_add_column(element, compiler, **kw):
# return "%s %s" % (


@@ -12,7 +12,6 @@ from typing import List
from typing import Literal
from typing import Mapping
from typing import Optional
from typing import overload
from typing import Sequence
from typing import Tuple
from typing import Type
@@ -27,6 +26,7 @@ if TYPE_CHECKING:
from sqlalchemy.sql.elements import conv
from sqlalchemy.sql.elements import TextClause
from sqlalchemy.sql.expression import TableClause
from sqlalchemy.sql.functions import Function
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.schema import Computed
from sqlalchemy.sql.schema import Identity
@@ -35,36 +35,16 @@ if TYPE_CHECKING:
from sqlalchemy.sql.type_api import TypeEngine
from sqlalchemy.util import immutabledict
from .operations.base import BatchOperations
from .operations.ops import AddColumnOp
from .operations.ops import AddConstraintOp
from .operations.ops import AlterColumnOp
from .operations.ops import AlterTableOp
from .operations.ops import BulkInsertOp
from .operations.ops import CreateIndexOp
from .operations.ops import CreateTableCommentOp
from .operations.ops import CreateTableOp
from .operations.ops import DropColumnOp
from .operations.ops import DropConstraintOp
from .operations.ops import DropIndexOp
from .operations.ops import DropTableCommentOp
from .operations.ops import DropTableOp
from .operations.ops import ExecuteSQLOp
from .operations.ops import BatchOperations
from .operations.ops import MigrateOperation
from .runtime.migration import MigrationContext
from .util.sqla_compat import _literal_bindparam
_T = TypeVar("_T")
_C = TypeVar("_C", bound=Callable[..., Any])
### end imports ###
def add_column(
table_name: str,
column: Column[Any],
*,
schema: Optional[str] = None,
if_not_exists: Optional[bool] = None,
table_name: str, column: Column[Any], *, schema: Optional[str] = None
) -> None:
"""Issue an "add column" instruction using the current
migration context.
@@ -141,10 +121,6 @@ def add_column(
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_not_exists: If True, adds IF NOT EXISTS operator
when creating the new column for compatible dialects
.. versionadded:: 1.16.0
"""
@@ -154,14 +130,12 @@ def alter_column(
*,
nullable: Optional[bool] = None,
comment: Union[str, Literal[False], None] = False,
server_default: Union[
str, bool, Identity, Computed, TextClause, None
] = False,
server_default: Any = False,
new_column_name: Optional[str] = None,
type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None,
existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None,
type_: Union[TypeEngine, Type[TypeEngine], None] = None,
existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
existing_server_default: Union[
str, bool, Identity, Computed, TextClause, None
str, bool, Identity, Computed, None
] = False,
existing_nullable: Optional[bool] = None,
existing_comment: Optional[str] = None,
@@ -256,7 +230,7 @@ def batch_alter_table(
table_name: str,
schema: Optional[str] = None,
recreate: Literal["auto", "always", "never"] = "auto",
partial_reordering: Optional[Tuple[Any, ...]] = None,
partial_reordering: Optional[tuple] = None,
copy_from: Optional[Table] = None,
table_args: Tuple[Any, ...] = (),
table_kwargs: Mapping[str, Any] = immutabledict({}),
@@ -403,7 +377,7 @@ def batch_alter_table(
def bulk_insert(
table: Union[Table, TableClause],
rows: List[Dict[str, Any]],
rows: List[dict],
*,
multiinsert: bool = True,
) -> None:
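
A usage sketch of bulk_insert following the pattern from the Alembic documentation, using an ad-hoc table construct so the statement does not depend on reflection; names are illustrative:

from datetime import date
from sqlalchemy import Date, Integer, String
from sqlalchemy.sql import column, table
from alembic import op

accounts_table = table(
    "account",
    column("id", Integer),
    column("name", String),
    column("create_date", Date),
)

def upgrade():
    op.bulk_insert(
        accounts_table,
        [
            {"id": 1, "name": "John Smith", "create_date": date(2010, 10, 5)},
            {"id": 2, "name": "Ed Williams", "create_date": date(2007, 5, 27)},
        ],
    )
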
@@ -659,7 +633,7 @@ def create_foreign_key(
def create_index(
index_name: Optional[str],
table_name: str,
columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
columns: Sequence[Union[str, TextClause, Function[Any]]],
*,
schema: Optional[str] = None,
unique: bool = False,
@@ -756,12 +730,7 @@ def create_primary_key(
"""
def create_table(
table_name: str,
*columns: SchemaItem,
if_not_exists: Optional[bool] = None,
**kw: Any,
) -> Table:
def create_table(table_name: str, *columns: SchemaItem, **kw: Any) -> Table:
r"""Issue a "create table" instruction using the current migration
context.
@@ -832,10 +801,6 @@ def create_table(
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_not_exists: If True, adds IF NOT EXISTS operator when
creating the new table.
.. versionadded:: 1.13.3
:param \**kw: Other keyword arguments are passed to the underlying
:class:`sqlalchemy.schema.Table` object created for the command.
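
A usage sketch of create_table with the if_not_exists flag documented above (Alembic 1.13.3+); names are illustrative:

import sqlalchemy as sa
from alembic import op

def upgrade():
    op.create_table(
        "audit_log",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("payload", sa.Text(), nullable=True),
        if_not_exists=True,  # CREATE TABLE IF NOT EXISTS on supporting backends
    )
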
@@ -935,11 +900,6 @@ def drop_column(
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_exists: If True, adds IF EXISTS operator when
dropping the new column for compatible dialects
.. versionadded:: 1.16.0
:param mssql_drop_check: Optional boolean. When ``True``, on
Microsoft SQL Server only, first
drop the CHECK constraint on the column using a
@@ -961,6 +921,7 @@ def drop_column(
then exec's a separate DROP CONSTRAINT for that default. Only
works if the column has exactly one FK constraint which refers to
it, at the moment.
"""
def drop_constraint(
@@ -969,7 +930,6 @@ def drop_constraint(
type_: Optional[str] = None,
*,
schema: Optional[str] = None,
if_exists: Optional[bool] = None,
) -> None:
r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
@@ -981,10 +941,6 @@ def drop_constraint(
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_exists: If True, adds IF EXISTS operator when
dropping the constraint
.. versionadded:: 1.16.0
"""
@@ -1025,11 +981,7 @@ def drop_index(
"""
def drop_table(
table_name: str,
*,
schema: Optional[str] = None,
if_exists: Optional[bool] = None,
**kw: Any,
table_name: str, *, schema: Optional[str] = None, **kw: Any
) -> None:
r"""Issue a "drop table" instruction using the current
migration context.
@@ -1044,10 +996,6 @@ def drop_table(
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_exists: If True, adds IF EXISTS operator when
dropping the table.
.. versionadded:: 1.13.3
:param \**kw: Other keyword arguments are passed to the underlying
:class:`sqlalchemy.schema.Table` object created for the command.
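
A combined usage sketch of the if_exists flags documented above (drop_column and drop_constraint in 1.16.0, drop_table in 1.13.3); names are illustrative:

from alembic import op

def downgrade():
    op.drop_constraint("uq_account_email", "account", type_="unique", if_exists=True)
    op.drop_column("account", "last_seen", if_exists=True)
    op.drop_table("audit_log", if_exists=True)
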
@@ -1184,7 +1132,7 @@ def f(name: str) -> conv:
names will be converted along conventions. If the ``target_metadata``
contains the naming convention
``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
output of the following::
output of the following:
op.add_column("t", "x", Boolean(name="x"))
@@ -1214,7 +1162,7 @@ def get_context() -> MigrationContext:
"""
def implementation_for(op_cls: Any) -> Callable[[_C], _C]:
def implementation_for(op_cls: Any) -> Callable[..., Any]:
"""Register an implementation for a given :class:`.MigrateOperation`.
This is part of the operation extensibility API.
@@ -1226,7 +1174,7 @@ def implementation_for(op_cls: Any) -> Callable[[_C], _C]:
"""
def inline_literal(
value: Union[str, int], type_: Optional[TypeEngine[Any]] = None
value: Union[str, int], type_: Optional[TypeEngine] = None
) -> _literal_bindparam:
r"""Produce an 'inline literal' expression, suitable for
using in an INSERT, UPDATE, or DELETE statement.
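
A usage sketch of inline_literal in the style of the Alembic documentation: values are rendered inline rather than as bound parameters, so the statement also works in offline --sql mode; names are illustrative:

from sqlalchemy import String
from sqlalchemy.sql import column, table
from alembic import op

account = table("account", column("name", String))

def upgrade():
    op.execute(
        account.update()
        .where(account.c.name == op.inline_literal("account 1"))
        .values({"name": op.inline_literal("account 2")})
    )
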
@@ -1270,27 +1218,6 @@ def inline_literal(
"""
@overload
def invoke(operation: CreateTableOp) -> Table: ...
@overload
def invoke(
operation: Union[
AddConstraintOp,
DropConstraintOp,
CreateIndexOp,
DropIndexOp,
AddColumnOp,
AlterColumnOp,
AlterTableOp,
CreateTableCommentOp,
DropTableCommentOp,
DropColumnOp,
BulkInsertOp,
DropTableOp,
ExecuteSQLOp,
],
) -> None: ...
@overload
def invoke(operation: MigrateOperation) -> Any:
"""Given a :class:`.MigrateOperation`, invoke it in terms of
this :class:`.Operations` instance.
@@ -1299,7 +1226,7 @@ def invoke(operation: MigrateOperation) -> Any:
def register_operation(
name: str, sourcename: Optional[str] = None
) -> Callable[[Type[_T]], Type[_T]]:
) -> Callable[[_T], _T]:
"""Register a new operation for this class.
This method is normally used to add new operations
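
A sketch of the plugin pattern this decorator enables, following the operation-plugins recipe from the Alembic documentation; CreateSequenceOp and create_sequence are illustrative names:

from alembic.operations import Operations, MigrateOperation

@Operations.register_operation("create_sequence")
class CreateSequenceOp(MigrateOperation):
    """CREATE SEQUENCE as a first-class migration operation."""

    def __init__(self, sequence_name, schema=None):
        self.sequence_name = sequence_name
        self.schema = schema

    @classmethod
    def create_sequence(cls, operations, sequence_name, **kw):
        # this becomes available as op.create_sequence(...) in scripts
        return operations.invoke(CreateSequenceOp(sequence_name, **kw))

@Operations.implementation_for(CreateSequenceOp)
def create_sequence(operations, operation):
    # the runtime implementation dispatched via implementation_for
    operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)
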


@@ -1,5 +1,3 @@
# mypy: allow-untyped-calls
from __future__ import annotations
from contextlib import contextmanager
@@ -12,9 +10,7 @@ from typing import Dict
from typing import Iterator
from typing import List # noqa
from typing import Mapping
from typing import NoReturn
from typing import Optional
from typing import overload
from typing import Sequence # noqa
from typing import Tuple
from typing import Type # noqa
@@ -43,6 +39,7 @@ if TYPE_CHECKING:
from sqlalchemy.sql.expression import ColumnElement
from sqlalchemy.sql.expression import TableClause
from sqlalchemy.sql.expression import TextClause
from sqlalchemy.sql.functions import Function
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.schema import Computed
from sqlalchemy.sql.schema import Identity
@@ -50,28 +47,12 @@ if TYPE_CHECKING:
from sqlalchemy.types import TypeEngine
from .batch import BatchOperationsImpl
from .ops import AddColumnOp
from .ops import AddConstraintOp
from .ops import AlterColumnOp
from .ops import AlterTableOp
from .ops import BulkInsertOp
from .ops import CreateIndexOp
from .ops import CreateTableCommentOp
from .ops import CreateTableOp
from .ops import DropColumnOp
from .ops import DropConstraintOp
from .ops import DropIndexOp
from .ops import DropTableCommentOp
from .ops import DropTableOp
from .ops import ExecuteSQLOp
from .ops import MigrateOperation
from ..ddl import DefaultImpl
from ..runtime.migration import MigrationContext
__all__ = ("Operations", "BatchOperations")
_T = TypeVar("_T")
_C = TypeVar("_C", bound=Callable[..., Any])
class AbstractOperations(util.ModuleClsProxy):
"""Base class for Operations and BatchOperations.
@@ -105,7 +86,7 @@ class AbstractOperations(util.ModuleClsProxy):
@classmethod
def register_operation(
cls, name: str, sourcename: Optional[str] = None
) -> Callable[[Type[_T]], Type[_T]]:
) -> Callable[[_T], _T]:
"""Register a new operation for this class.
This method is normally used to add new operations
@@ -122,7 +103,7 @@ class AbstractOperations(util.ModuleClsProxy):
"""
def register(op_cls: Type[_T]) -> Type[_T]:
def register(op_cls):
if sourcename is None:
fn = getattr(op_cls, name)
source_name = fn.__name__
@@ -141,11 +122,8 @@ class AbstractOperations(util.ModuleClsProxy):
*spec, formatannotation=formatannotation_fwdref
)
num_defaults = len(spec[3]) if spec[3] else 0
defaulted_vals: Tuple[Any, ...]
if num_defaults:
defaulted_vals = tuple(name_args[0 - num_defaults :])
defaulted_vals = name_args[0 - num_defaults :]
else:
defaulted_vals = ()
@@ -186,7 +164,7 @@ class AbstractOperations(util.ModuleClsProxy):
globals_ = dict(globals())
globals_.update({"op_cls": op_cls})
lcl: Dict[str, Any] = {}
lcl = {}
exec(func_text, globals_, lcl)
setattr(cls, name, lcl[name])
@@ -202,7 +180,7 @@ class AbstractOperations(util.ModuleClsProxy):
return register
@classmethod
def implementation_for(cls, op_cls: Any) -> Callable[[_C], _C]:
def implementation_for(cls, op_cls: Any) -> Callable[..., Any]:
"""Register an implementation for a given :class:`.MigrateOperation`.
This is part of the operation extensibility API.
@@ -213,7 +191,7 @@ class AbstractOperations(util.ModuleClsProxy):
"""
def decorate(fn: _C) -> _C:
def decorate(fn):
cls._to_impl.dispatch_for(op_cls)(fn)
return fn
@@ -235,7 +213,7 @@ class AbstractOperations(util.ModuleClsProxy):
table_name: str,
schema: Optional[str] = None,
recreate: Literal["auto", "always", "never"] = "auto",
partial_reordering: Optional[Tuple[Any, ...]] = None,
partial_reordering: Optional[tuple] = None,
copy_from: Optional[Table] = None,
table_args: Tuple[Any, ...] = (),
table_kwargs: Mapping[str, Any] = util.immutabledict(),
@@ -404,32 +382,6 @@ class AbstractOperations(util.ModuleClsProxy):
return self.migration_context
@overload
def invoke(self, operation: CreateTableOp) -> Table: ...
@overload
def invoke(
self,
operation: Union[
AddConstraintOp,
DropConstraintOp,
CreateIndexOp,
DropIndexOp,
AddColumnOp,
AlterColumnOp,
AlterTableOp,
CreateTableCommentOp,
DropTableCommentOp,
DropColumnOp,
BulkInsertOp,
DropTableOp,
ExecuteSQLOp,
],
) -> None: ...
@overload
def invoke(self, operation: MigrateOperation) -> Any: ...
def invoke(self, operation: MigrateOperation) -> Any:
"""Given a :class:`.MigrateOperation`, invoke it in terms of
this :class:`.Operations` instance.
@@ -464,7 +416,7 @@ class AbstractOperations(util.ModuleClsProxy):
names will be converted along conventions. If the ``target_metadata``
contains the naming convention
``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
output of the following::
output of the following:
op.add_column("t", "x", Boolean(name="x"))
@@ -618,7 +570,6 @@ class Operations(AbstractOperations):
column: Column[Any],
*,
schema: Optional[str] = None,
if_not_exists: Optional[bool] = None,
) -> None:
"""Issue an "add column" instruction using the current
migration context.
@@ -695,10 +646,6 @@ class Operations(AbstractOperations):
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_not_exists: If True, adds IF NOT EXISTS operator
when creating the new column for compatible dialects
.. versionadded:: 1.16.0
""" # noqa: E501
...
@@ -710,16 +657,12 @@ class Operations(AbstractOperations):
*,
nullable: Optional[bool] = None,
comment: Union[str, Literal[False], None] = False,
server_default: Union[
str, bool, Identity, Computed, TextClause, None
] = False,
server_default: Any = False,
new_column_name: Optional[str] = None,
type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None,
existing_type: Union[
TypeEngine[Any], Type[TypeEngine[Any]], None
] = None,
type_: Union[TypeEngine, Type[TypeEngine], None] = None,
existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
existing_server_default: Union[
str, bool, Identity, Computed, TextClause, None
str, bool, Identity, Computed, None
] = False,
existing_nullable: Optional[bool] = None,
existing_comment: Optional[str] = None,
@@ -813,7 +756,7 @@ class Operations(AbstractOperations):
def bulk_insert(
self,
table: Union[Table, TableClause],
rows: List[Dict[str, Any]],
rows: List[dict],
*,
multiinsert: bool = True,
) -> None:
@@ -1080,7 +1023,7 @@ class Operations(AbstractOperations):
self,
index_name: Optional[str],
table_name: str,
columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
columns: Sequence[Union[str, TextClause, Function[Any]]],
*,
schema: Optional[str] = None,
unique: bool = False,
@@ -1181,11 +1124,7 @@ class Operations(AbstractOperations):
...
def create_table(
self,
table_name: str,
*columns: SchemaItem,
if_not_exists: Optional[bool] = None,
**kw: Any,
self, table_name: str, *columns: SchemaItem, **kw: Any
) -> Table:
r"""Issue a "create table" instruction using the current migration
context.
@@ -1257,10 +1196,6 @@ class Operations(AbstractOperations):
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_not_exists: If True, adds IF NOT EXISTS operator when
creating the new table.
.. versionadded:: 1.13.3
:param \**kw: Other keyword arguments are passed to the underlying
:class:`sqlalchemy.schema.Table` object created for the command.
@@ -1366,11 +1301,6 @@ class Operations(AbstractOperations):
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_exists: If True, adds IF EXISTS operator when
dropping the new column for compatible dialects
.. versionadded:: 1.16.0
:param mssql_drop_check: Optional boolean. When ``True``, on
Microsoft SQL Server only, first
drop the CHECK constraint on the column using a
@@ -1392,6 +1322,7 @@ class Operations(AbstractOperations):
then exec's a separate DROP CONSTRAINT for that default. Only
works if the column has exactly one FK constraint which refers to
it, at the moment.
""" # noqa: E501
...
@@ -1402,7 +1333,6 @@ class Operations(AbstractOperations):
type_: Optional[str] = None,
*,
schema: Optional[str] = None,
if_exists: Optional[bool] = None,
) -> None:
r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
@@ -1414,10 +1344,6 @@ class Operations(AbstractOperations):
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_exists: If True, adds IF EXISTS operator when
dropping the constraint
.. versionadded:: 1.16.0
""" # noqa: E501
...
@@ -1461,12 +1387,7 @@ class Operations(AbstractOperations):
...
def drop_table(
self,
table_name: str,
*,
schema: Optional[str] = None,
if_exists: Optional[bool] = None,
**kw: Any,
self, table_name: str, *, schema: Optional[str] = None, **kw: Any
) -> None:
r"""Issue a "drop table" instruction using the current
migration context.
@@ -1481,10 +1402,6 @@ class Operations(AbstractOperations):
quoting of the schema outside of the default behavior, use
the SQLAlchemy construct
:class:`~sqlalchemy.sql.elements.quoted_name`.
:param if_exists: If True, adds IF EXISTS operator when
dropping the table.
.. versionadded:: 1.13.3
:param \**kw: Other keyword arguments are passed to the underlying
:class:`sqlalchemy.schema.Table` object created for the command.
@@ -1643,7 +1560,7 @@ class BatchOperations(AbstractOperations):
impl: BatchOperationsImpl
def _noop(self, operation: Any) -> NoReturn:
def _noop(self, operation):
raise NotImplementedError(
"The %s method does not apply to a batch table alter operation."
% operation
@@ -1660,7 +1577,6 @@ class BatchOperations(AbstractOperations):
*,
insert_before: Optional[str] = None,
insert_after: Optional[str] = None,
if_not_exists: Optional[bool] = None,
) -> None:
"""Issue an "add column" instruction using the current
batch migration context.
@@ -1680,10 +1596,8 @@ class BatchOperations(AbstractOperations):
comment: Union[str, Literal[False], None] = False,
server_default: Any = False,
new_column_name: Optional[str] = None,
type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None,
existing_type: Union[
TypeEngine[Any], Type[TypeEngine[Any]], None
] = None,
type_: Union[TypeEngine, Type[TypeEngine], None] = None,
existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
existing_server_default: Union[
str, bool, Identity, Computed, None
] = False,
@@ -1738,7 +1652,7 @@ class BatchOperations(AbstractOperations):
def create_exclude_constraint(
self, constraint_name: str, *elements: Any, **kw: Any
) -> Optional[Table]:
):
"""Issue a "create exclude constraint" instruction using the
current batch migration context.
@@ -1754,7 +1668,7 @@ class BatchOperations(AbstractOperations):
def create_foreign_key(
self,
constraint_name: Optional[str],
constraint_name: str,
referent_table: str,
local_cols: List[str],
remote_cols: List[str],
@@ -1804,7 +1718,7 @@ class BatchOperations(AbstractOperations):
...
def create_primary_key(
self, constraint_name: Optional[str], columns: List[str]
self, constraint_name: str, columns: List[str]
) -> None:
"""Issue a "create primary key" instruction using the
current batch migration context.


@@ -1,6 +1,3 @@
# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics
from __future__ import annotations
from typing import Any
@@ -18,10 +15,9 @@ from sqlalchemy import Index
from sqlalchemy import MetaData
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import schema as sql_schema
from sqlalchemy import select
from sqlalchemy import Table
from sqlalchemy import types as sqltypes
from sqlalchemy.sql.schema import SchemaEventTarget
from sqlalchemy.events import SchemaEventTarget
from sqlalchemy.util import OrderedDict
from sqlalchemy.util import topological
@@ -32,9 +28,11 @@ from ..util.sqla_compat import _copy_expression
from ..util.sqla_compat import _ensure_scope_for_ddl
from ..util.sqla_compat import _fk_is_self_referential
from ..util.sqla_compat import _idx_table_bound_expressions
from ..util.sqla_compat import _insert_inline
from ..util.sqla_compat import _is_type_bound
from ..util.sqla_compat import _remove_column_from_collection
from ..util.sqla_compat import _resolve_for_variant
from ..util.sqla_compat import _select
from ..util.sqla_compat import constraint_name_defined
from ..util.sqla_compat import constraint_name_string
@@ -376,7 +374,7 @@ class ApplyBatchImpl:
for idx_existing in self.indexes.values():
# this is a lift-and-move from Table.to_metadata
if idx_existing._column_flag:
if idx_existing._column_flag: # type: ignore
continue
idx_copy = Index(
@@ -405,7 +403,9 @@ class ApplyBatchImpl:
def _setup_referent(
self, metadata: MetaData, constraint: ForeignKeyConstraint
) -> None:
spec = constraint.elements[0]._get_colspec()
spec = constraint.elements[
0
]._get_colspec() # type:ignore[attr-defined]
parts = spec.split(".")
tname = parts[-2]
if len(parts) == 3:
@@ -448,15 +448,13 @@ class ApplyBatchImpl:
try:
op_impl._exec(
self.new_table.insert()
.inline()
.from_select(
_insert_inline(self.new_table).from_select(
list(
k
for k, transfer in self.column_transfers.items()
if "expr" in transfer
),
select(
_select(
*[
transfer["expr"]
for transfer in self.column_transfers.values()
@@ -548,7 +546,9 @@ class ApplyBatchImpl:
else:
sql_schema.DefaultClause(
server_default # type: ignore[arg-type]
)._set_parent(existing)
)._set_parent( # type:ignore[attr-defined]
existing
)
if autoincrement is not None:
existing.autoincrement = bool(autoincrement)
