Mirror of https://github.com/infiniflow/ragflow.git (synced 2026-02-02 00:25:06 +08:00)
feat(tools): add Elasticsearch to OceanBase migration tool (#12927)
### What problem does this PR solve?

Fixes https://github.com/infiniflow/ragflow/issues/12774

Add a CLI tool for migrating RAGFlow data from Elasticsearch to OceanBase, enabling users to switch their document storage backend.

- Automatic discovery and migration of all `ragflow_*` indices
- Schema conversion with vector dimension auto-detection
- Batch processing with progress tracking and resume capability
- Data consistency validation and migration report generation

**Note**: Due to network issues, I was unable to pull the required Docker images (Elasticsearch, OceanBase) to run the full end-to-end verification. Unit tests have been verified to pass. I will complete the e2e verification when network conditions allow, and submit a follow-up PR if any fixes are needed.

```
============================= test session starts ==============================
platform darwin -- Python 3.13.6, pytest-9.0.2, pluggy-1.6.0
rootdir: /Users/sevenc/code/ai/oceanbase/ragflow/tools/es-to-oceanbase-migration
configfile: pyproject.toml
testpaths: tests
plugins: anyio-4.12.1, asyncio-1.3.0, cov-7.0.0
collected 86 items

tests/test_progress.py::TestMigrationProgress::test_create_basic_progress PASSED [ 1%]
tests/test_progress.py::TestMigrationProgress::test_create_progress_with_counts PASSED [ 2%]
tests/test_progress.py::TestMigrationProgress::test_progress_default_values PASSED [ 3%]
tests/test_progress.py::TestMigrationProgress::test_progress_status_values PASSED [ 4%]
tests/test_progress.py::TestProgressManager::test_create_progress_manager PASSED [ 5%]
tests/test_progress.py::TestProgressManager::test_create_progress_manager_creates_dir PASSED [ 6%]
tests/test_progress.py::TestProgressManager::test_create_progress PASSED [ 8%]
tests/test_progress.py::TestProgressManager::test_save_and_load_progress PASSED [ 9%]
tests/test_progress.py::TestProgressManager::test_load_nonexistent_progress PASSED [ 10%]
tests/test_progress.py::TestProgressManager::test_delete_progress PASSED [ 11%]
tests/test_progress.py::TestProgressManager::test_update_progress PASSED [ 12%]
tests/test_progress.py::TestProgressManager::test_update_progress_multiple_batches PASSED [ 13%]
tests/test_progress.py::TestProgressManager::test_mark_completed PASSED [ 15%]
tests/test_progress.py::TestProgressManager::test_mark_failed PASSED [ 16%]
tests/test_progress.py::TestProgressManager::test_mark_paused PASSED [ 17%]
tests/test_progress.py::TestProgressManager::test_can_resume_running PASSED [ 18%]
tests/test_progress.py::TestProgressManager::test_can_resume_paused PASSED [ 19%]
tests/test_progress.py::TestProgressManager::test_can_resume_completed PASSED [ 20%]
tests/test_progress.py::TestProgressManager::test_can_resume_nonexistent PASSED [ 22%]
tests/test_progress.py::TestProgressManager::test_get_resume_info PASSED [ 23%]
tests/test_progress.py::TestProgressManager::test_get_resume_info_nonexistent PASSED [ 24%]
tests/test_progress.py::TestProgressManager::test_progress_file_path PASSED [ 25%]
tests/test_progress.py::TestProgressManager::test_progress_file_content PASSED [ 26%]
tests/test_schema.py::TestRAGFlowSchemaConverter::test_analyze_ragflow_mapping PASSED [ 27%]
tests/test_schema.py::TestRAGFlowSchemaConverter::test_detect_vector_size PASSED [ 29%]
tests/test_schema.py::TestRAGFlowSchemaConverter::test_unknown_fields PASSED [ 30%]
tests/test_schema.py::TestRAGFlowSchemaConverter::test_get_column_definitions PASSED [ 31%]
tests/test_schema.py::TestRAGFlowDataConverter::test_convert_basic_document PASSED [ 32%]
tests/test_schema.py::TestRAGFlowDataConverter::test_convert_with_vector PASSED [ 33%]
tests/test_schema.py::TestRAGFlowDataConverter::test_convert_array_fields PASSED [ 34%]
tests/test_schema.py::TestRAGFlowDataConverter::test_convert_json_fields PASSED [ 36%]
tests/test_schema.py::TestRAGFlowDataConverter::test_convert_unknown_fields_to_extra PASSED [ 37%]
tests/test_schema.py::TestRAGFlowDataConverter::test_convert_kb_id_list PASSED [ 38%]
tests/test_schema.py::TestRAGFlowDataConverter::test_convert_content_with_weight_dict PASSED [ 39%]
tests/test_schema.py::TestRAGFlowDataConverter::test_convert_batch PASSED [ 40%]
tests/test_schema.py::TestVectorFieldPattern::test_valid_patterns PASSED [ 41%]
tests/test_schema.py::TestVectorFieldPattern::test_invalid_patterns PASSED [ 43%]
tests/test_schema.py::TestVectorFieldPattern::test_extract_dimension PASSED [ 44%]
tests/test_schema.py::TestConstants::test_array_columns PASSED [ 45%]
tests/test_schema.py::TestConstants::test_json_columns PASSED [ 46%]
tests/test_schema.py::TestConstants::test_ragflow_columns_completeness PASSED [ 47%]
tests/test_schema.py::TestConstants::test_fts_columns PASSED [ 48%]
tests/test_schema.py::TestConstants::test_ragflow_columns_types PASSED [ 50%]
tests/test_schema.py::TestRAGFlowSchemaConverterEdgeCases::test_empty_mapping PASSED [ 51%]
tests/test_schema.py::TestRAGFlowSchemaConverterEdgeCases::test_mapping_without_properties PASSED [ 52%]
tests/test_schema.py::TestRAGFlowSchemaConverterEdgeCases::test_multiple_vector_fields PASSED [ 53%]
tests/test_schema.py::TestRAGFlowSchemaConverterEdgeCases::test_get_column_definitions_without_analysis PASSED [ 54%]
tests/test_schema.py::TestRAGFlowSchemaConverterEdgeCases::test_get_vector_fields PASSED [ 55%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_empty_document PASSED [ 56%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_document_without_source PASSED [ 58%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_boolean_to_integer PASSED [ 59%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_invalid_integer PASSED [ 60%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_float_field PASSED [ 61%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_array_with_special_characters PASSED [ 62%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_already_json_array PASSED [ 63%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_single_value_to_array PASSED [ 65%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_detect_vector_fields_from_document PASSED [ 66%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_with_default_values PASSED [ 67%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_list_content PASSED [ 68%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_convert_batch_empty PASSED [ 69%]
tests/test_schema.py::TestRAGFlowDataConverterEdgeCases::test_existing_extra_field_merged PASSED [ 70%]
tests/test_verify.py::TestVerificationResult::test_create_basic_result PASSED [ 72%]
tests/test_verify.py::TestVerificationResult::test_result_default_values PASSED [ 73%]
tests/test_verify.py::TestVerificationResult::test_result_with_counts PASSED [ 74%]
tests/test_verify.py::TestMigrationVerifier::test_verify_counts_match PASSED [ 75%]
tests/test_verify.py::TestMigrationVerifier::test_verify_counts_mismatch PASSED [ 76%]
tests/test_verify.py::TestMigrationVerifier::test_verify_samples_all_match PASSED [ 77%]
tests/test_verify.py::TestMigrationVerifier::test_verify_samples_some_missing PASSED [ 79%]
tests/test_verify.py::TestMigrationVerifier::test_verify_samples_data_mismatch PASSED [ 80%]
tests/test_verify.py::TestMigrationVerifier::test_values_equal_none_values PASSED [ 81%]
tests/test_verify.py::TestMigrationVerifier::test_values_equal_array_columns PASSED [ 82%]
tests/test_verify.py::TestMigrationVerifier::test_values_equal_json_columns PASSED [ 83%]
tests/test_verify.py::TestMigrationVerifier::test_values_equal_kb_id_list PASSED [ 84%]
tests/test_verify.py::TestMigrationVerifier::test_values_equal_content_with_weight_dict PASSED [ 86%]
tests/test_verify.py::TestMigrationVerifier::test_determine_result_passed PASSED [ 87%]
tests/test_verify.py::TestMigrationVerifier::test_determine_result_failed_count PASSED [ 88%]
tests/test_verify.py::TestMigrationVerifier::test_determine_result_failed_samples PASSED [ 89%]
tests/test_verify.py::TestMigrationVerifier::test_generate_report PASSED [ 90%]
tests/test_verify.py::TestMigrationVerifier::test_generate_report_with_missing PASSED [ 91%]
tests/test_verify.py::TestMigrationVerifier::test_generate_report_with_mismatches PASSED [ 93%]
tests/test_verify.py::TestValueComparison::test_string_comparison PASSED [ 94%]
tests/test_verify.py::TestValueComparison::test_integer_comparison PASSED [ 95%]
tests/test_verify.py::TestValueComparison::test_float_comparison PASSED [ 96%]
tests/test_verify.py::TestValueComparison::test_boolean_comparison PASSED [ 97%]
tests/test_verify.py::TestValueComparison::test_empty_array_comparison PASSED [ 98%]
tests/test_verify.py::TestValueComparison::test_nested_json_comparison PASSED [100%]
======================= 86 passed, 88 warnings in 0.66s ========================
```

### Type of change

- [ ] Bug Fix (non-breaking change which fixes an issue)
- [x] New Feature (non-breaking change which adds functionality)
- [ ] Documentation Update
- [ ] Refactoring
- [ ] Performance Improvement
- [ ] Other (please describe):
This commit is contained in:

tools/es-to-oceanbase-migration/README.md (new file, 499 lines)

# RAGFlow ES to OceanBase Migration Tool

A CLI tool for migrating RAGFlow data from Elasticsearch to OceanBase. This tool is specifically designed for RAGFlow's data structure and handles schema conversion, vector data mapping, batch import, and resume capability.

## Features

- **RAGFlow-Specific**: Designed for RAGFlow's fixed data schema
- **ES 8+ Support**: Uses `search_after` API for efficient data scrolling
- **Vector Support**: Auto-detects vector field dimensions from ES mapping (see the sketch below)
- **Batch Processing**: Configurable batch size for optimal performance
- **Resume Capability**: Save and resume migration progress
- **Data Consistency Validation**: Compare document counts and sample data
- **Migration Report Generation**: Generate detailed migration reports
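
The vector-dimension auto-detection works off RAGFlow's vector field naming convention, where the embedding dimension is encoded in the field name (for example `q_1024_vec`, as shown in the `schema` output later in this README). The snippet below is only a minimal illustration of that idea; the real logic lives in `RAGFlowSchemaConverter`, and the helper name here is made up for the example:

```python
import re

# Illustrative sketch only -- the actual detection is implemented in
# RAGFlowSchemaConverter. RAGFlow vector fields follow the "q_{dim}_vec"
# naming convention, e.g. "q_1024_vec" stores 1024-dimensional embeddings.
VECTOR_FIELD_PATTERN = re.compile(r"^q_(\d+)_vec$")

def detect_vector_dimension(field_name: str) -> int | None:
    """Return the dimension encoded in a RAGFlow vector field name, or None."""
    match = VECTOR_FIELD_PATTERN.match(field_name)
    return int(match.group(1)) if match else None

assert detect_vector_dimension("q_1024_vec") == 1024
assert detect_vector_dimension("content_ltks") is None
```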

## Quick Start

This section provides a complete guide to verify the migration works correctly with a real RAGFlow deployment.

### Prerequisites

- RAGFlow source code cloned
- Docker and Docker Compose installed
- This migration tool installed (`uv pip install -e .`)

### Step 1: Start RAGFlow with Elasticsearch Backend

First, start RAGFlow using Elasticsearch as the document storage backend (default configuration).

```bash
# Navigate to RAGFlow docker directory
cd /path/to/ragflow/docker

# Ensure DOC_ENGINE=elasticsearch in .env (this is the default)
# DOC_ENGINE=elasticsearch

# Start RAGFlow with Elasticsearch (--profile cpu for CPU, --profile gpu for GPU)
docker compose --profile elasticsearch --profile cpu up -d

# Wait for services to be ready (this may take a few minutes)
docker compose ps

# Check ES is running
curl -X GET "http://localhost:9200/_cluster/health?pretty"
```

### Step 2: Create Test Data in RAGFlow

1. Open RAGFlow Web UI: http://localhost:9380
2. Create a new Knowledge Base
3. Upload some test documents (PDF, TXT, DOCX, etc.)
4. Wait for the documents to be parsed and indexed
5. Test the knowledge base with some queries to ensure it works

### Step 3: Verify ES Data (Optional)

Before migration, verify the data exists in Elasticsearch. This step is important to ensure you have a baseline for comparison after migration.

```bash
# Navigate to migration tool directory (from ragflow root)
cd tools/es-to-oceanbase-migration

# Activate the virtual environment if not already done
source .venv/bin/activate

# Check connection and list indices
es-ob-migrate status --es-host localhost --es-port 9200

# First, find your actual index name (pattern: ragflow_{tenant_id})
curl -X GET "http://localhost:9200/_cat/indices/ragflow_*?v"

# List all knowledge bases in the index
# Replace ragflow_{tenant_id} with your actual index from the curl output above
es-ob-migrate list-kb --es-host localhost --es-port 9200 --index ragflow_{tenant_id}

# View sample documents
es-ob-migrate sample --es-host localhost --es-port 9200 --index ragflow_{tenant_id} --size 5

# Check schema
es-ob-migrate schema --es-host localhost --es-port 9200 --index ragflow_{tenant_id}
```

### Step 4: Start OceanBase for Migration

Start RAGFlow's OceanBase service as the migration target:

```bash
# Navigate to the ragflow docker directory (from tools/es-to-oceanbase-migration)
cd ../../docker

# Start only OceanBase service from RAGFlow docker compose
docker compose --profile oceanbase up -d

# Wait for OceanBase to be ready
docker compose logs -f oceanbase
```

### Step 5: Run Migration

Execute the migration from Elasticsearch to OceanBase:

```bash
cd ../tools/es-to-oceanbase-migration

# Option A: Migrate ALL ragflow_* indices (Recommended)
# If --index and --table are omitted, the tool auto-discovers all ragflow_* indices
es-ob-migrate migrate \
    --es-host localhost --es-port 9200 \
    --ob-host localhost --ob-port 2881 \
    --ob-user "root@ragflow" --ob-password "infini_rag_flow" \
    --ob-database ragflow_doc \
    --batch-size 1000 \
    --verify

# Option B: Migrate a specific index
# Use the SAME name for both --index and --table
# The index name pattern is: ragflow_{tenant_id}
# Find your tenant_id from Step 3's curl output
es-ob-migrate migrate \
    --es-host localhost --es-port 9200 \
    --ob-host localhost --ob-port 2881 \
    --ob-user "root@ragflow" --ob-password "infini_rag_flow" \
    --ob-database ragflow_doc \
    --index ragflow_{tenant_id} \
    --table ragflow_{tenant_id} \
    --batch-size 1000 \
    --verify
```

Expected output:

```
RAGFlow ES to OceanBase Migration
Source: localhost:9200/ragflow_{tenant_id}
Target: localhost:2881/ragflow_doc.ragflow_{tenant_id}

Step 1: Checking connections...
ES cluster status: green
OceanBase connection: OK (version: 4.3.5.1)

Step 2: Analyzing ES index...
Auto-detected vector dimension: 1024
Known RAGFlow fields: 25
Total documents: 1,234

Step 3: Creating OceanBase table...
Created table 'ragflow_{tenant_id}' with RAGFlow schema

Step 4: Migrating data...
Migrating... ━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% 1,234/1,234

Step 5: Verifying migration...
✓ Document counts match: 1,234
✓ Sample verification: 100/100 matched

Migration completed successfully!
Total: 1,234 documents
Migrated: 1,234 documents
Failed: 0 documents
Duration: 45.2 seconds
```

### Step 6: Stop RAGFlow and Switch to OceanBase Backend

```bash
# Navigate to ragflow docker directory
cd ../../docker

# Stop only Elasticsearch and RAGFlow (but keep OceanBase running)
docker compose --profile elasticsearch --profile cpu down

# Edit .env file, change:
# DOC_ENGINE=elasticsearch -> DOC_ENGINE=oceanbase
#
# The OceanBase connection settings are already configured by default in .env
```

### Step 7: Start RAGFlow with OceanBase Backend

```bash
# OceanBase should still be running from Step 4
# Start RAGFlow with OceanBase profile (OceanBase is already running)
docker compose --profile oceanbase --profile cpu up -d

# Wait for services to start
docker compose ps

# Check logs for any errors
docker compose logs -f ragflow-cpu
```

### Step 8: Data Integrity Verification (Optional)

Run the verification command to compare ES and OceanBase data:

```bash
es-ob-migrate verify \
    --es-host localhost --es-port 9200 \
    --ob-host localhost --ob-port 2881 \
    --ob-user "root@ragflow" --ob-password "infini_rag_flow" \
    --ob-database ragflow_doc \
    --index ragflow_{tenant_id} \
    --table ragflow_{tenant_id} \
    --sample-size 100
```

Expected output:

```
╭─────────────────────────────────────────────────────────────╮
│ Migration Verification Report │
├─────────────────────────────────────────────────────────────┤
│ ES Index: ragflow_{tenant_id} │
│ OB Table: ragflow_{tenant_id} │
├─────────────────────────────────────────────────────────────┤
│ Document Counts │
│ ES: 1,234 │
│ OB: 1,234 │
│ Match: ✓ Yes │
├─────────────────────────────────────────────────────────────┤
│ Sample Verification (100 documents) │
│ Matched: 100 │
│ Match Rate: 100.0% │
├─────────────────────────────────────────────────────────────┤
│ Result: ✓ PASSED │
╰─────────────────────────────────────────────────────────────╯
```

### Step 9: Verify RAGFlow Works with OceanBase

1. Open RAGFlow Web UI: http://localhost:9380
2. Navigate to your Knowledge Base
3. Try the same queries you tested before migration

## CLI Reference

### `es-ob-migrate migrate`

Run data migration from Elasticsearch to OceanBase.

| Option | Default | Description |
|--------|---------|-------------|
| `--es-host` | localhost | Elasticsearch host |
| `--es-port` | 9200 | Elasticsearch port |
| `--es-user` | None | ES username (if auth required) |
| `--es-password` | None | ES password |
| `--es-api-key` | None | ES API key (alternative to username/password) |
| `--ob-host` | localhost | OceanBase host |
| `--ob-port` | 2881 | OceanBase port |
| `--ob-user` | root@test | OceanBase user (format: user@tenant) |
| `--ob-password` | "" | OceanBase password |
| `--ob-database` | test | OceanBase database name |
| `-i, --index` | None | Source ES index (omit to migrate all ragflow_* indices) |
| `-t, --table` | None | Target OB table (omit to use same name as index) |
| `--batch-size` | 1000 | Documents per batch |
| `--resume` | False | Resume from previous progress |
| `--verify/--no-verify` | True | Verify after migration |
| `--progress-dir` | .migration_progress | Progress file directory |

**Example:**

```bash
# Migrate all ragflow_* indices
es-ob-migrate migrate \
    --es-host localhost --es-port 9200 \
    --ob-host localhost --ob-port 2881 \
    --ob-user "root@ragflow" --ob-password "infini_rag_flow" \
    --ob-database ragflow_doc

# Migrate a specific index
es-ob-migrate migrate \
    --es-host localhost --es-port 9200 \
    --ob-host localhost --ob-port 2881 \
    --ob-user "root@ragflow" --ob-password "infini_rag_flow" \
    --ob-database ragflow_doc \
    --index ragflow_abc123 --table ragflow_abc123

# Resume interrupted migration
es-ob-migrate migrate \
    --es-host localhost --es-port 9200 \
    --ob-host localhost --ob-port 2881 \
    --ob-user "root@ragflow" --ob-password "infini_rag_flow" \
    --ob-database ragflow_doc \
    --index ragflow_abc123 --table ragflow_abc123 \
    --resume
```

**Resume Feature:**

Migration progress is automatically saved to the `.migration_progress/` directory. If the migration is interrupted (network error, timeout, etc.), use `--resume` to continue from where it stopped:

- Progress file: `.migration_progress/{index_name}_progress.json`
- Contains: total count, migrated count, last document ID, timestamp
- On resume: skips already migrated documents, continues from last position (see the sketch below)
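
The progress file is plain JSON, so an interrupted run can also be inspected by hand. The snippet below sketches how a wrapper script might read it before deciding to pass `--resume`; the key names are illustrative assumptions, and the authoritative format is whatever `ProgressManager` writes:

```python
import json
from pathlib import Path

# Illustrative sketch only -- key names are assumptions; see ProgressManager
# for the real format written to .migration_progress/{index_name}_progress.json.
progress_file = Path(".migration_progress") / "ragflow_abc123_progress.json"

if progress_file.exists():
    progress = json.loads(progress_file.read_text())
    print(f"Migrated {progress.get('migrated_count')} of {progress.get('total_count')} documents")
    print(f"Last document ID: {progress.get('last_doc_id')} (saved {progress.get('updated_at')})")
    # Any status other than "completed" suggests `es-ob-migrate migrate --resume` can pick up here.
```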

**Output:**

```
RAGFlow ES to OceanBase Migration
Source: localhost:9200/ragflow_abc123
Target: localhost:2881/ragflow_doc.ragflow_abc123

Step 1: Checking connections...
ES cluster status: green
OceanBase connection: OK

Step 2: Analyzing ES index...
Auto-detected vector dimension: 1024
Total documents: 1,234

Step 3: Creating OceanBase table...
Created table 'ragflow_abc123' with RAGFlow schema

Step 4: Migrating data...
Migrating... ━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% 1,234/1,234

Migration completed successfully!
Total: 1,234 documents
Duration: 45.2 seconds
```

---

### `es-ob-migrate list-indices`

List all RAGFlow indices (`ragflow_*`) in Elasticsearch.

**Example:**

```bash
es-ob-migrate list-indices --es-host localhost --es-port 9200
```

**Output:**

```
RAGFlow Indices in Elasticsearch:

Index Name                          Documents    Type
ragflow_abc123def456789             1234         Document Chunks
ragflow_doc_meta_abc123def456789    56           Document Metadata

Total: 2 ragflow_* indices found
```

---

### `es-ob-migrate schema`

Preview schema analysis from ES mapping.

**Example:**

```bash
es-ob-migrate schema --es-host localhost --es-port 9200 --index ragflow_abc123
```

**Output:**

```
RAGFlow Schema Analysis for index: ragflow_abc123

Vector Fields:
q_1024_vec: dense_vector (dim=1024)

Known RAGFlow Fields (25):
id, kb_id, doc_id, docnm_kwd, content_with_weight, content_ltks,
available_int, important_kwd, question_kwd, tag_kwd, page_num_int...

Unknown Fields (stored in 'extra' column):
custom_field_1, custom_field_2
```
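
Unknown fields are not dropped during migration; they are folded into the `extra` column shown above. The sketch below illustrates the general idea only (it is not the actual `RAGFlowDataConverter` code, and the field set is truncated for brevity):

```python
import json

# Illustrative sketch only -- a small subset of the fixed RAGFlow columns.
KNOWN_FIELDS = {"id", "kb_id", "doc_id", "docnm_kwd", "content_with_weight", "available_int"}

def split_known_and_extra(source: dict) -> dict:
    """Keep known RAGFlow columns as-is and fold everything else into 'extra'."""
    row = {k: v for k, v in source.items() if k in KNOWN_FIELDS}
    extra = {k: v for k, v in source.items() if k not in KNOWN_FIELDS}
    if extra:
        row["extra"] = json.dumps(extra, ensure_ascii=False)
    return row

print(split_known_and_extra({"id": "chunk_001", "custom_field_1": "x"}))
```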

---

### `es-ob-migrate verify`

Verify migration data consistency between ES and OceanBase.

**Example:**

```bash
es-ob-migrate verify \
    --es-host localhost --es-port 9200 \
    --ob-host localhost --ob-port 2881 \
    --ob-user "root@ragflow" --ob-password "infini_rag_flow" \
    --ob-database ragflow_doc \
    --index ragflow_abc123 --table ragflow_abc123 \
    --sample-size 100
```

**Output:**

```
╭─────────────────────────────────────────────────────────────╮
│ Migration Verification Report │
├─────────────────────────────────────────────────────────────┤
│ ES Index: ragflow_abc123 │
│ OB Table: ragflow_abc123 │
├─────────────────────────────────────────────────────────────┤
│ Document Counts │
│ ES: 1,234 │
│ OB: 1,234 │
│ Match: ✓ Yes │
├─────────────────────────────────────────────────────────────┤
│ Sample Verification (100 documents) │
│ Matched: 100 │
│ Match Rate: 100.0% │
├─────────────────────────────────────────────────────────────┤
│ Result: ✓ PASSED │
╰─────────────────────────────────────────────────────────────╯
```

---

### `es-ob-migrate list-kb`

List all knowledge bases in an ES index.

**Example:**

```bash
es-ob-migrate list-kb --es-host localhost --es-port 9200 --index ragflow_abc123
```

**Output:**

```
Knowledge Bases in index 'ragflow_abc123':

KB ID                      Documents
kb_001_finance_docs        456
kb_002_technical_manual    321
kb_003_product_faq         457

Total: 3 knowledge bases, 1234 documents
```

---

### `es-ob-migrate sample`

Show sample documents from ES index.

**Example:**

```bash
es-ob-migrate sample --es-host localhost --es-port 9200 --index ragflow_abc123 --size 2
```

**Output:**

```
Sample Documents from 'ragflow_abc123':

Document 1:
id: chunk_001_abc123
kb_id: kb_001_finance_docs
doc_id: doc_001
docnm_kwd: quarterly_report.pdf
content_with_weight: The company reported Q3 revenue of $1.2B...
available_int: 1

Document 2:
id: chunk_002_def456
kb_id: kb_001_finance_docs
doc_id: doc_001
docnm_kwd: quarterly_report.pdf
content_with_weight: Operating expenses decreased by 5%...
available_int: 1
```

---

### `es-ob-migrate status`

Check connection status to ES and OceanBase.

**Example:**

```bash
es-ob-migrate status \
    --es-host localhost --es-port 9200 \
    --ob-host localhost --ob-port 2881 \
    --ob-user "root@ragflow" --ob-password "infini_rag_flow"
```

**Output:**

```
Connection Status:

Elasticsearch:
Host: localhost:9200
Status: ✓ Connected
Cluster: ragflow-cluster
Version: 8.11.0
Indices: 5

OceanBase:
Host: localhost:2881
Status: ✓ Connected
Version: 4.3.5.1
```

tools/es-to-oceanbase-migration/pyproject.toml (new file, 51 lines)

[project]
name = "es-ob-migration"
version = "0.1.0"
description = "Data migration tool from Elasticsearch to OceanBase"
readme = "README.md"
requires-python = ">=3.10"
license = { text = "Apache-2.0" }
authors = [{ name = "RAGFlow Team" }]
keywords = ["elasticsearch", "oceanbase", "migration", "vector", "rag"]

dependencies = [
    "elasticsearch>=8.0.0",
    "pyobvector>=0.1.0",
    "pymysql>=1.0.0",
    "sqlalchemy>=2.0.0",
    "click>=8.0.0",
    "tqdm>=4.60.0",
    "rich>=13.0.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.0.0",
    "pytest-asyncio>=0.21.0",
    "pytest-cov>=4.0.0",
]

[project.scripts]
es-ob-migrate = "es_ob_migration.cli:main"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src/es_ob_migration"]

[tool.uv]
dev-dependencies = [
    "pytest>=7.0.0",
    "pytest-asyncio>=0.21.0",
    "pytest-cov>=4.0.0",
]

[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
addopts = "-v --tb=short"

@ -0,0 +1,41 @@

"""
RAGFlow ES to OceanBase Migration Tool

A CLI tool for migrating RAGFlow data from Elasticsearch 8+ to OceanBase,
supporting schema conversion, vector data mapping, batch import, and resume capability.

This tool is specifically designed for RAGFlow's data structure.
"""

__version__ = "0.1.0"

from .migrator import ESToOceanBaseMigrator
from .es_client import ESClient
from .ob_client import OBClient
from .schema import RAGFlowSchemaConverter, RAGFlowDataConverter, RAGFLOW_COLUMNS
from .verify import MigrationVerifier, VerificationResult
from .progress import ProgressManager, MigrationProgress

# Backwards compatibility aliases
SchemaConverter = RAGFlowSchemaConverter
DataConverter = RAGFlowDataConverter

__all__ = [
    # Main classes
    "ESToOceanBaseMigrator",
    "ESClient",
    "OBClient",
    # Schema
    "RAGFlowSchemaConverter",
    "RAGFlowDataConverter",
    "RAGFLOW_COLUMNS",
    # Verification
    "MigrationVerifier",
    "VerificationResult",
    # Progress
    "ProgressManager",
    "MigrationProgress",
    # Aliases
    "SchemaConverter",
    "DataConverter",
]

tools/es-to-oceanbase-migration/src/es_ob_migration/cli.py (new file, 574 lines)

"""
|
||||||
|
CLI entry point for RAGFlow ES to OceanBase migration tool.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import click
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.table import Table
|
||||||
|
from rich.logging import RichHandler
|
||||||
|
|
||||||
|
from .es_client import ESClient
|
||||||
|
from .ob_client import OBClient
|
||||||
|
from .migrator import ESToOceanBaseMigrator
|
||||||
|
from .verify import MigrationVerifier
|
||||||
|
from .schema import RAGFLOW_COLUMNS
|
||||||
|
|
||||||
|
console = Console()
|
||||||
|
|
||||||
|
|
||||||
|
def setup_logging(verbose: bool = False):
|
||||||
|
"""Setup logging configuration."""
|
||||||
|
level = logging.DEBUG if verbose else logging.INFO
|
||||||
|
logging.basicConfig(
|
||||||
|
level=level,
|
||||||
|
format="%(message)s",
|
||||||
|
datefmt="[%X]",
|
||||||
|
handlers=[RichHandler(rich_tracebacks=True, console=console)],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
@click.option("-v", "--verbose", is_flag=True, help="Enable verbose logging")
|
||||||
|
@click.pass_context
|
||||||
|
def main(ctx, verbose):
|
||||||
|
"""RAGFlow ES to OceanBase Migration Tool.
|
||||||
|
|
||||||
|
Migrate RAGFlow data from Elasticsearch 8+ to OceanBase with schema conversion,
|
||||||
|
vector data mapping, batch import, and resume capability.
|
||||||
|
|
||||||
|
This tool is specifically designed for RAGFlow's data structure.
|
||||||
|
"""
|
||||||
|
ctx.ensure_object(dict)
|
||||||
|
ctx.obj["verbose"] = verbose
|
||||||
|
setup_logging(verbose)
|
||||||
|
|
||||||
|
|
||||||
|
@main.command()
|
||||||
|
@click.option("--es-host", default="localhost", help="Elasticsearch host")
|
||||||
|
@click.option("--es-port", default=9200, type=int, help="Elasticsearch port")
|
||||||
|
@click.option("--es-user", default=None, help="Elasticsearch username")
|
||||||
|
@click.option("--es-password", default=None, help="Elasticsearch password")
|
||||||
|
@click.option("--es-api-key", default=None, help="Elasticsearch API key")
|
||||||
|
@click.option("--ob-host", default="localhost", help="OceanBase host")
|
||||||
|
@click.option("--ob-port", default=2881, type=int, help="OceanBase port")
|
||||||
|
@click.option("--ob-user", default="root@test", help="OceanBase user (format: user@tenant)")
|
||||||
|
@click.option("--ob-password", default="", help="OceanBase password")
|
||||||
|
@click.option("--ob-database", default="test", help="OceanBase database")
|
||||||
|
@click.option("--index", "-i", default=None, help="Source ES index name (omit to migrate all ragflow_* indices)")
|
||||||
|
@click.option("--table", "-t", default=None, help="Target OceanBase table name (omit to use same name as index)")
|
||||||
|
@click.option("--batch-size", default=1000, type=int, help="Batch size for migration")
|
||||||
|
@click.option("--resume", is_flag=True, help="Resume from previous progress")
|
||||||
|
@click.option("--verify/--no-verify", default=True, help="Verify after migration")
|
||||||
|
@click.option("--progress-dir", default=".migration_progress", help="Progress file directory")
|
||||||
|
@click.pass_context
|
||||||
|
def migrate(
|
||||||
|
ctx,
|
||||||
|
es_host,
|
||||||
|
es_port,
|
||||||
|
es_user,
|
||||||
|
es_password,
|
||||||
|
es_api_key,
|
||||||
|
ob_host,
|
||||||
|
ob_port,
|
||||||
|
ob_user,
|
||||||
|
ob_password,
|
||||||
|
ob_database,
|
||||||
|
index,
|
||||||
|
table,
|
||||||
|
batch_size,
|
||||||
|
resume,
|
||||||
|
verify,
|
||||||
|
progress_dir,
|
||||||
|
):
|
||||||
|
"""Run RAGFlow data migration from Elasticsearch to OceanBase.
|
||||||
|
|
||||||
|
If --index is omitted, all indices starting with 'ragflow_' will be migrated.
|
||||||
|
If --table is omitted, the same name as the source index will be used.
|
||||||
|
"""
|
||||||
|
console.print("[bold]RAGFlow ES to OceanBase Migration[/]")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Initialize ES client first to discover indices if needed
|
||||||
|
es_client = ESClient(
|
||||||
|
host=es_host,
|
||||||
|
port=es_port,
|
||||||
|
username=es_user,
|
||||||
|
password=es_password,
|
||||||
|
api_key=es_api_key,
|
||||||
|
)
|
||||||
|
|
||||||
|
ob_client = OBClient(
|
||||||
|
host=ob_host,
|
||||||
|
port=ob_port,
|
||||||
|
user=ob_user,
|
||||||
|
password=ob_password,
|
||||||
|
database=ob_database,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Determine indices to migrate
|
||||||
|
if index:
|
||||||
|
# Single index specified
|
||||||
|
indices_to_migrate = [(index, table if table else index)]
|
||||||
|
else:
|
||||||
|
# Auto-discover all ragflow_* indices
|
||||||
|
console.print(f"\n[cyan]Discovering RAGFlow indices...[/]")
|
||||||
|
ragflow_indices = es_client.list_ragflow_indices()
|
||||||
|
|
||||||
|
if not ragflow_indices:
|
||||||
|
console.print("[yellow]No ragflow_* indices found in Elasticsearch[/]")
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
# Each index maps to a table with the same name
|
||||||
|
indices_to_migrate = [(idx, idx) for idx in ragflow_indices]
|
||||||
|
|
||||||
|
console.print(f"[green]Found {len(indices_to_migrate)} RAGFlow indices:[/]")
|
||||||
|
for idx, _ in indices_to_migrate:
|
||||||
|
doc_count = es_client.count_documents(idx)
|
||||||
|
console.print(f" - {idx} ({doc_count:,} documents)")
|
||||||
|
console.print()
|
||||||
|
|
||||||
|
# Initialize migrator
|
||||||
|
migrator = ESToOceanBaseMigrator(
|
||||||
|
es_client=es_client,
|
||||||
|
ob_client=ob_client,
|
||||||
|
progress_dir=progress_dir,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Track overall results
|
||||||
|
total_success = 0
|
||||||
|
total_failed = 0
|
||||||
|
results = []
|
||||||
|
|
||||||
|
# Migrate each index
|
||||||
|
for es_index, ob_table in indices_to_migrate:
|
||||||
|
console.print(f"\n[bold blue]{'='*60}[/]")
|
||||||
|
console.print(f"[bold]Migrating: {es_index} -> {ob_database}.{ob_table}[/]")
|
||||||
|
console.print(f"[bold blue]{'='*60}[/]")
|
||||||
|
|
||||||
|
result = migrator.migrate(
|
||||||
|
es_index=es_index,
|
||||||
|
ob_table=ob_table,
|
||||||
|
batch_size=batch_size,
|
||||||
|
resume=resume,
|
||||||
|
verify_after=verify,
|
||||||
|
)
|
||||||
|
|
||||||
|
results.append(result)
|
||||||
|
if result["success"]:
|
||||||
|
total_success += 1
|
||||||
|
else:
|
||||||
|
total_failed += 1
|
||||||
|
|
||||||
|
# Summary for multiple indices
|
||||||
|
if len(indices_to_migrate) > 1:
|
||||||
|
console.print(f"\n[bold]{'='*60}[/]")
|
||||||
|
console.print(f"[bold]Migration Summary[/]")
|
||||||
|
console.print(f"[bold]{'='*60}[/]")
|
||||||
|
console.print(f" Total indices: {len(indices_to_migrate)}")
|
||||||
|
console.print(f" [green]Successful: {total_success}[/]")
|
||||||
|
if total_failed > 0:
|
||||||
|
console.print(f" [red]Failed: {total_failed}[/]")
|
||||||
|
|
||||||
|
# Exit code based on results
|
||||||
|
if total_failed == 0:
|
||||||
|
console.print("\n[bold green]All migrations completed successfully![/]")
|
||||||
|
sys.exit(0)
|
||||||
|
else:
|
||||||
|
console.print(f"\n[bold red]{total_failed} migration(s) failed[/]")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[bold red]Error: {e}[/]")
|
||||||
|
if ctx.obj.get("verbose"):
|
||||||
|
console.print_exception()
|
||||||
|
sys.exit(1)
|
||||||
|
finally:
|
||||||
|
# Cleanup
|
||||||
|
if "es_client" in locals():
|
||||||
|
es_client.close()
|
||||||
|
if "ob_client" in locals():
|
||||||
|
ob_client.close()
|
||||||
|
|
||||||
|
|
||||||
|
@main.command()
|
||||||
|
@click.option("--es-host", default="localhost", help="Elasticsearch host")
|
||||||
|
@click.option("--es-port", default=9200, type=int, help="Elasticsearch port")
|
||||||
|
@click.option("--es-user", default=None, help="Elasticsearch username")
|
||||||
|
@click.option("--es-password", default=None, help="Elasticsearch password")
|
||||||
|
@click.option("--index", "-i", required=True, help="ES index name")
|
||||||
|
@click.option("--output", "-o", default=None, help="Output file (JSON)")
|
||||||
|
@click.pass_context
|
||||||
|
def schema(ctx, es_host, es_port, es_user, es_password, index, output):
|
||||||
|
"""Preview RAGFlow schema analysis from ES mapping."""
|
||||||
|
try:
|
||||||
|
es_client = ESClient(
|
||||||
|
host=es_host,
|
||||||
|
port=es_port,
|
||||||
|
username=es_user,
|
||||||
|
password=es_password,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Dummy OB client for schema preview
|
||||||
|
ob_client = None
|
||||||
|
|
||||||
|
migrator = ESToOceanBaseMigrator(es_client, ob_client if ob_client else OBClient.__new__(OBClient))
|
||||||
|
# Directly use schema converter
|
||||||
|
from .schema import RAGFlowSchemaConverter
|
||||||
|
converter = RAGFlowSchemaConverter()
|
||||||
|
|
||||||
|
es_mapping = es_client.get_index_mapping(index)
|
||||||
|
analysis = converter.analyze_es_mapping(es_mapping)
|
||||||
|
column_defs = converter.get_column_definitions()
|
||||||
|
|
||||||
|
# Display analysis
|
||||||
|
console.print(f"\n[bold]ES Index Analysis: {index}[/]\n")
|
||||||
|
|
||||||
|
# Known RAGFlow fields
|
||||||
|
console.print(f"[green]Known RAGFlow fields:[/] {len(analysis['known_fields'])}")
|
||||||
|
|
||||||
|
# Vector fields
|
||||||
|
if analysis['vector_fields']:
|
||||||
|
console.print(f"\n[cyan]Vector fields detected:[/]")
|
||||||
|
for vf in analysis['vector_fields']:
|
||||||
|
console.print(f" - {vf['name']} (dimension: {vf['dimension']})")
|
||||||
|
|
||||||
|
# Unknown fields
|
||||||
|
if analysis['unknown_fields']:
|
||||||
|
console.print(f"\n[yellow]Unknown fields (will be stored in 'extra'):[/]")
|
||||||
|
for uf in analysis['unknown_fields']:
|
||||||
|
console.print(f" - {uf}")
|
||||||
|
|
||||||
|
# Display RAGFlow column schema
|
||||||
|
console.print(f"\n[bold]RAGFlow OceanBase Schema ({len(column_defs)} columns):[/]\n")
|
||||||
|
|
||||||
|
table = Table(title="Column Definitions")
|
||||||
|
table.add_column("Column Name", style="cyan")
|
||||||
|
table.add_column("OB Type", style="green")
|
||||||
|
table.add_column("Nullable", style="yellow")
|
||||||
|
table.add_column("Special", style="magenta")
|
||||||
|
|
||||||
|
for col in column_defs[:20]: # Show first 20
|
||||||
|
special = []
|
||||||
|
if col.get("is_primary"):
|
||||||
|
special.append("PK")
|
||||||
|
if col.get("index"):
|
||||||
|
special.append("IDX")
|
||||||
|
if col.get("is_array"):
|
||||||
|
special.append("ARRAY")
|
||||||
|
if col.get("is_vector"):
|
||||||
|
special.append("VECTOR")
|
||||||
|
|
||||||
|
table.add_row(
|
||||||
|
col["name"],
|
||||||
|
col["ob_type"],
|
||||||
|
"Yes" if col.get("nullable", True) else "No",
|
||||||
|
", ".join(special) if special else "-",
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(column_defs) > 20:
|
||||||
|
table.add_row("...", f"({len(column_defs) - 20} more)", "", "")
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
|
||||||
|
# Save to file if requested
|
||||||
|
if output:
|
||||||
|
preview = {
|
||||||
|
"es_index": index,
|
||||||
|
"es_mapping": es_mapping,
|
||||||
|
"analysis": analysis,
|
||||||
|
"ob_columns": column_defs,
|
||||||
|
}
|
||||||
|
with open(output, "w") as f:
|
||||||
|
json.dump(preview, f, indent=2, default=str)
|
||||||
|
console.print(f"\nSchema saved to {output}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[bold red]Error: {e}[/]")
|
||||||
|
if ctx.obj.get("verbose"):
|
||||||
|
console.print_exception()
|
||||||
|
sys.exit(1)
|
||||||
|
finally:
|
||||||
|
if "es_client" in locals():
|
||||||
|
es_client.close()
|
||||||
|
|
||||||
|
|
||||||
|
@main.command()
|
||||||
|
@click.option("--es-host", default="localhost", help="Elasticsearch host")
|
||||||
|
@click.option("--es-port", default=9200, type=int, help="Elasticsearch port")
|
||||||
|
@click.option("--ob-host", default="localhost", help="OceanBase host")
|
||||||
|
@click.option("--ob-port", default=2881, type=int, help="OceanBase port")
|
||||||
|
@click.option("--ob-user", default="root@test", help="OceanBase user")
|
||||||
|
@click.option("--ob-password", default="", help="OceanBase password")
|
||||||
|
@click.option("--ob-database", default="test", help="OceanBase database")
|
||||||
|
@click.option("--index", "-i", required=True, help="Source ES index name")
|
||||||
|
@click.option("--table", "-t", required=True, help="Target OceanBase table name")
|
||||||
|
@click.option("--sample-size", default=100, type=int, help="Sample size for verification")
|
||||||
|
@click.pass_context
|
||||||
|
def verify(
|
||||||
|
ctx,
|
||||||
|
es_host,
|
||||||
|
es_port,
|
||||||
|
ob_host,
|
||||||
|
ob_port,
|
||||||
|
ob_user,
|
||||||
|
ob_password,
|
||||||
|
ob_database,
|
||||||
|
index,
|
||||||
|
table,
|
||||||
|
sample_size,
|
||||||
|
):
|
||||||
|
"""Verify migration data consistency."""
|
||||||
|
try:
|
||||||
|
es_client = ESClient(host=es_host, port=es_port)
|
||||||
|
ob_client = OBClient(
|
||||||
|
host=ob_host,
|
||||||
|
port=ob_port,
|
||||||
|
user=ob_user,
|
||||||
|
password=ob_password,
|
||||||
|
database=ob_database,
|
||||||
|
)
|
||||||
|
|
||||||
|
verifier = MigrationVerifier(es_client, ob_client)
|
||||||
|
result = verifier.verify(
|
||||||
|
index, table,
|
||||||
|
sample_size=sample_size,
|
||||||
|
)
|
||||||
|
|
||||||
|
console.print(verifier.generate_report(result))
|
||||||
|
|
||||||
|
sys.exit(0 if result.passed else 1)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[bold red]Error: {e}[/]")
|
||||||
|
if ctx.obj.get("verbose"):
|
||||||
|
console.print_exception()
|
||||||
|
sys.exit(1)
|
||||||
|
finally:
|
||||||
|
if "es_client" in locals():
|
||||||
|
es_client.close()
|
||||||
|
if "ob_client" in locals():
|
||||||
|
ob_client.close()
|
||||||
|
|
||||||
|
|
||||||
|
@main.command("list-indices")
|
||||||
|
@click.option("--es-host", default="localhost", help="Elasticsearch host")
|
||||||
|
@click.option("--es-port", default=9200, type=int, help="Elasticsearch port")
|
||||||
|
@click.option("--es-user", default=None, help="Elasticsearch username")
|
||||||
|
@click.option("--es-password", default=None, help="Elasticsearch password")
|
||||||
|
@click.pass_context
|
||||||
|
def list_indices(ctx, es_host, es_port, es_user, es_password):
|
||||||
|
"""List all RAGFlow indices (ragflow_*) in Elasticsearch."""
|
||||||
|
try:
|
||||||
|
es_client = ESClient(
|
||||||
|
host=es_host,
|
||||||
|
port=es_port,
|
||||||
|
username=es_user,
|
||||||
|
password=es_password,
|
||||||
|
)
|
||||||
|
|
||||||
|
console.print(f"\n[bold]RAGFlow Indices in Elasticsearch ({es_host}:{es_port})[/]\n")
|
||||||
|
|
||||||
|
indices = es_client.list_ragflow_indices()
|
||||||
|
|
||||||
|
if not indices:
|
||||||
|
console.print("[yellow]No ragflow_* indices found[/]")
|
||||||
|
return
|
||||||
|
|
||||||
|
table = Table(title="RAGFlow Indices")
|
||||||
|
table.add_column("Index Name", style="cyan")
|
||||||
|
table.add_column("Document Count", style="green", justify="right")
|
||||||
|
table.add_column("Type", style="yellow")
|
||||||
|
|
||||||
|
total_docs = 0
|
||||||
|
for idx in indices:
|
||||||
|
doc_count = es_client.count_documents(idx)
|
||||||
|
total_docs += doc_count
|
||||||
|
|
||||||
|
# Determine index type
|
||||||
|
if idx.startswith("ragflow_doc_meta_"):
|
||||||
|
idx_type = "Metadata"
|
||||||
|
elif idx.startswith("ragflow_"):
|
||||||
|
idx_type = "Document Chunks"
|
||||||
|
else:
|
||||||
|
idx_type = "Unknown"
|
||||||
|
|
||||||
|
table.add_row(idx, f"{doc_count:,}", idx_type)
|
||||||
|
|
||||||
|
table.add_row("", "", "")
|
||||||
|
table.add_row("[bold]Total[/]", f"[bold]{total_docs:,}[/]", f"[bold]{len(indices)} indices[/]")
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[bold red]Error: {e}[/]")
|
||||||
|
if ctx.obj.get("verbose"):
|
||||||
|
console.print_exception()
|
||||||
|
sys.exit(1)
|
||||||
|
finally:
|
||||||
|
if "es_client" in locals():
|
||||||
|
es_client.close()
|
||||||
|
|
||||||
|
|
||||||
|
@main.command("list-kb")
|
||||||
|
@click.option("--es-host", default="localhost", help="Elasticsearch host")
|
||||||
|
@click.option("--es-port", default=9200, type=int, help="Elasticsearch port")
|
||||||
|
@click.option("--es-user", default=None, help="Elasticsearch username")
|
||||||
|
@click.option("--es-password", default=None, help="Elasticsearch password")
|
||||||
|
@click.option("--index", "-i", required=True, help="ES index name")
|
||||||
|
@click.pass_context
|
||||||
|
def list_kb(ctx, es_host, es_port, es_user, es_password, index):
|
||||||
|
"""List all knowledge bases in an ES index."""
|
||||||
|
try:
|
||||||
|
es_client = ESClient(
|
||||||
|
host=es_host,
|
||||||
|
port=es_port,
|
||||||
|
username=es_user,
|
||||||
|
password=es_password,
|
||||||
|
)
|
||||||
|
|
||||||
|
console.print(f"\n[bold]Knowledge Bases in index: {index}[/]\n")
|
||||||
|
|
||||||
|
# Get kb_id aggregation
|
||||||
|
agg_result = es_client.aggregate_field(index, "kb_id")
|
||||||
|
buckets = agg_result.get("buckets", [])
|
||||||
|
|
||||||
|
if not buckets:
|
||||||
|
console.print("[yellow]No knowledge bases found[/]")
|
||||||
|
return
|
||||||
|
|
||||||
|
table = Table(title="Knowledge Bases")
|
||||||
|
table.add_column("KB ID", style="cyan")
|
||||||
|
table.add_column("Document Count", style="green", justify="right")
|
||||||
|
|
||||||
|
total_docs = 0
|
||||||
|
for bucket in buckets:
|
||||||
|
table.add_row(
|
||||||
|
bucket["key"],
|
||||||
|
f"{bucket['doc_count']:,}",
|
||||||
|
)
|
||||||
|
total_docs += bucket["doc_count"]
|
||||||
|
|
||||||
|
table.add_row("", "")
|
||||||
|
table.add_row("[bold]Total[/]", f"[bold]{total_docs:,}[/]")
|
||||||
|
|
||||||
|
console.print(table)
|
||||||
|
console.print(f"\nTotal knowledge bases: {len(buckets)}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[bold red]Error: {e}[/]")
|
||||||
|
if ctx.obj.get("verbose"):
|
||||||
|
console.print_exception()
|
||||||
|
sys.exit(1)
|
||||||
|
finally:
|
||||||
|
if "es_client" in locals():
|
||||||
|
es_client.close()
|
||||||
|
|
||||||
|
|
||||||
|
@main.command()
|
||||||
|
@click.option("--es-host", default="localhost", help="Elasticsearch host")
|
||||||
|
@click.option("--es-port", default=9200, type=int, help="Elasticsearch port")
|
||||||
|
@click.option("--ob-host", default="localhost", help="OceanBase host")
|
||||||
|
@click.option("--ob-port", default=2881, type=int, help="OceanBase port")
|
||||||
|
@click.option("--ob-user", default="root@test", help="OceanBase user")
|
||||||
|
@click.option("--ob-password", default="", help="OceanBase password")
|
||||||
|
@click.pass_context
|
||||||
|
def status(ctx, es_host, es_port, ob_host, ob_port, ob_user, ob_password):
|
||||||
|
"""Check connection status to ES and OceanBase."""
|
||||||
|
console.print("[bold]Connection Status[/]\n")
|
||||||
|
|
||||||
|
# Check ES
|
||||||
|
try:
|
||||||
|
es_client = ESClient(host=es_host, port=es_port)
|
||||||
|
health = es_client.health_check()
|
||||||
|
info = es_client.get_cluster_info()
|
||||||
|
console.print(f"[green]Elasticsearch ({es_host}:{es_port}): Connected[/]")
|
||||||
|
console.print(f" Cluster: {health.get('cluster_name')}")
|
||||||
|
console.print(f" Status: {health.get('status')}")
|
||||||
|
console.print(f" Version: {info.get('version', {}).get('number', 'unknown')}")
|
||||||
|
|
||||||
|
# List indices
|
||||||
|
indices = es_client.list_indices("*")
|
||||||
|
console.print(f" Indices: {len(indices)}")
|
||||||
|
|
||||||
|
es_client.close()
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[red]Elasticsearch ({es_host}:{es_port}): Failed[/]")
|
||||||
|
console.print(f" Error: {e}")
|
||||||
|
|
||||||
|
console.print()
|
||||||
|
|
||||||
|
# Check OceanBase
|
||||||
|
try:
|
||||||
|
ob_client = OBClient(
|
||||||
|
host=ob_host,
|
||||||
|
port=ob_port,
|
||||||
|
user=ob_user,
|
||||||
|
password=ob_password,
|
||||||
|
)
|
||||||
|
if ob_client.health_check():
|
||||||
|
version = ob_client.get_version()
|
||||||
|
console.print(f"[green]OceanBase ({ob_host}:{ob_port}): Connected[/]")
|
||||||
|
console.print(f" Version: {version}")
|
||||||
|
else:
|
||||||
|
console.print(f"[red]OceanBase ({ob_host}:{ob_port}): Health check failed[/]")
|
||||||
|
ob_client.close()
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[red]OceanBase ({ob_host}:{ob_port}): Failed[/]")
|
||||||
|
console.print(f" Error: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@main.command()
|
||||||
|
@click.option("--es-host", default="localhost", help="Elasticsearch host")
|
||||||
|
@click.option("--es-port", default=9200, type=int, help="Elasticsearch port")
|
||||||
|
@click.option("--index", "-i", required=True, help="ES index name")
|
||||||
|
@click.option("--size", "-n", default=5, type=int, help="Number of samples")
|
||||||
|
@click.pass_context
|
||||||
|
def sample(ctx, es_host, es_port, index, size):
|
||||||
|
"""Show sample documents from ES index."""
|
||||||
|
try:
|
||||||
|
es_client = ESClient(host=es_host, port=es_port)
|
||||||
|
|
||||||
|
docs = es_client.get_sample_documents(index, size)
|
||||||
|
|
||||||
|
console.print(f"\n[bold]Sample documents from {index}[/]")
|
||||||
|
console.print()
|
||||||
|
|
||||||
|
for i, doc in enumerate(docs, 1):
|
||||||
|
console.print(f"[bold cyan]Document {i}[/]")
|
||||||
|
console.print(f" _id: {doc.get('_id')}")
|
||||||
|
console.print(f" kb_id: {doc.get('kb_id')}")
|
||||||
|
console.print(f" doc_id: {doc.get('doc_id')}")
|
||||||
|
console.print(f" docnm_kwd: {doc.get('docnm_kwd')}")
|
||||||
|
|
||||||
|
# Check for vector fields
|
||||||
|
vector_fields = [k for k in doc.keys() if k.startswith("q_") and k.endswith("_vec")]
|
||||||
|
if vector_fields:
|
||||||
|
for vf in vector_fields:
|
||||||
|
vec = doc.get(vf)
|
||||||
|
if vec:
|
||||||
|
console.print(f" {vf}: [{len(vec)} dimensions]")
|
||||||
|
|
||||||
|
content = doc.get("content_with_weight", "")
|
||||||
|
if content:
|
||||||
|
if isinstance(content, dict):
|
||||||
|
content = json.dumps(content, ensure_ascii=False)
|
||||||
|
preview = content[:100] + "..." if len(str(content)) > 100 else content
|
||||||
|
console.print(f" content: {preview}")
|
||||||
|
|
||||||
|
console.print()
|
||||||
|
|
||||||
|
es_client.close()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[bold red]Error: {e}[/]")
|
||||||
|
if ctx.obj.get("verbose"):
|
||||||
|
console.print_exception()
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||

tools/es-to-oceanbase-migration/src/es_ob_migration/es_client.py (new file, 292 lines)

"""
|
||||||
|
Elasticsearch 8+ Client for RAGFlow data migration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Any, Iterator
|
||||||
|
|
||||||
|
from elasticsearch import Elasticsearch
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ESClient:
|
||||||
|
"""Elasticsearch client wrapper for RAGFlow migration operations."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
host: str = "localhost",
|
||||||
|
port: int = 9200,
|
||||||
|
username: str | None = None,
|
||||||
|
password: str | None = None,
|
||||||
|
api_key: str | None = None,
|
||||||
|
use_ssl: bool = False,
|
||||||
|
verify_certs: bool = True,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize ES client.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
host: ES host address
|
||||||
|
port: ES port
|
||||||
|
username: Basic auth username
|
||||||
|
password: Basic auth password
|
||||||
|
api_key: API key for authentication
|
||||||
|
use_ssl: Whether to use SSL
|
||||||
|
verify_certs: Whether to verify SSL certificates
|
||||||
|
"""
|
||||||
|
self.host = host
|
||||||
|
self.port = port
|
||||||
|
|
||||||
|
# Build connection URL
|
||||||
|
scheme = "https" if use_ssl else "http"
|
||||||
|
url = f"{scheme}://{host}:{port}"
|
||||||
|
|
||||||
|
# Build connection arguments
|
||||||
|
conn_args: dict[str, Any] = {
|
||||||
|
"hosts": [url],
|
||||||
|
"verify_certs": verify_certs,
|
||||||
|
}
|
||||||
|
|
||||||
|
if api_key:
|
||||||
|
conn_args["api_key"] = api_key
|
||||||
|
elif username and password:
|
||||||
|
conn_args["basic_auth"] = (username, password)
|
||||||
|
|
||||||
|
self.client = Elasticsearch(**conn_args)
|
||||||
|
logger.info(f"Connected to Elasticsearch at {url}")
|
||||||
|
|
||||||
|
def health_check(self) -> dict[str, Any]:
|
||||||
|
"""Check cluster health."""
|
||||||
|
return self.client.cluster.health().body
|
||||||
|
|
||||||
|
def get_cluster_info(self) -> dict[str, Any]:
|
||||||
|
"""Get cluster information."""
|
||||||
|
return self.client.info().body
|
||||||
|
|
||||||
|
def list_indices(self, pattern: str = "*") -> list[str]:
|
||||||
|
"""List all indices matching pattern."""
|
||||||
|
response = self.client.indices.get(index=pattern)
|
||||||
|
return list(response.keys())
|
||||||
|
|
||||||
|
def list_ragflow_indices(self) -> list[str]:
|
||||||
|
"""
|
||||||
|
List all RAGFlow-related indices.
|
||||||
|
|
||||||
|
Returns indices matching patterns:
|
||||||
|
- ragflow_* (document chunks)
|
||||||
|
- ragflow_doc_meta_* (document metadata)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of RAGFlow index names
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Get all ragflow_* indices
|
||||||
|
ragflow_indices = self.list_indices("ragflow_*")
|
||||||
|
return sorted(ragflow_indices)
|
||||||
|
except Exception:
|
||||||
|
# If no indices match, return empty list
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_index_mapping(self, index_name: str) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Get index mapping.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
index_name: Name of the index
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Index mapping dictionary
|
||||||
|
"""
|
||||||
|
response = self.client.indices.get_mapping(index=index_name)
|
||||||
|
return response[index_name]["mappings"]
|
||||||
|
|
||||||
|
def get_index_settings(self, index_name: str) -> dict[str, Any]:
|
||||||
|
"""Get index settings."""
|
||||||
|
response = self.client.indices.get_settings(index=index_name)
|
||||||
|
return response[index_name]["settings"]
|
||||||
|
|
||||||
|
def count_documents(self, index_name: str) -> int:
|
||||||
|
"""Count documents in an index."""
|
||||||
|
response = self.client.count(index=index_name)
|
||||||
|
return response["count"]
|
||||||
|
|
||||||
|
def count_documents_with_filter(
|
||||||
|
self,
|
||||||
|
index_name: str,
|
||||||
|
filters: dict[str, Any]
|
||||||
|
) -> int:
|
||||||
|
"""
|
||||||
|
Count documents with filter conditions.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
index_name: Index name
|
||||||
|
filters: Filter conditions (e.g., {"kb_id": "xxx"})
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Document count
|
||||||
|
"""
|
||||||
|
# Build bool query with filters
|
||||||
|
must_clauses = []
|
||||||
|
for field, value in filters.items():
|
||||||
|
if isinstance(value, list):
|
||||||
|
must_clauses.append({"terms": {field: value}})
|
||||||
|
else:
|
||||||
|
must_clauses.append({"term": {field: value}})
|
||||||
|
|
||||||
|
query = {
|
||||||
|
"bool": {
|
||||||
|
"must": must_clauses
|
||||||
|
}
|
||||||
|
} if must_clauses else {"match_all": {}}
|
||||||
|
|
||||||
|
response = self.client.count(index=index_name, query=query)
|
||||||
|
return response["count"]
|
||||||
|
|
||||||
|
def aggregate_field(
|
||||||
|
self,
|
||||||
|
index_name: str,
|
||||||
|
field: str,
|
||||||
|
size: int = 10000,
|
||||||
|
) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Aggregate field values (like getting all unique kb_ids).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
index_name: Index name
|
||||||
|
field: Field to aggregate
|
||||||
|
size: Max number of buckets
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Aggregation result with buckets
|
||||||
|
"""
|
||||||
|
response = self.client.search(
|
||||||
|
index=index_name,
|
||||||
|
size=0,
|
||||||
|
aggs={
|
||||||
|
"field_values": {
|
||||||
|
"terms": {
|
||||||
|
"field": field,
|
||||||
|
"size": size,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return response["aggregations"]["field_values"]
|
||||||
|
|
||||||
|
def scroll_documents(
|
||||||
|
self,
|
||||||
|
index_name: str,
|
||||||
|
batch_size: int = 1000,
|
||||||
|
query: dict[str, Any] | None = None,
|
||||||
|
sort_field: str = "_doc",
|
||||||
|
) -> Iterator[list[dict[str, Any]]]:
|
||||||
|
"""
|
||||||
|
Scroll through all documents in an index using search_after (ES 8+).
|
||||||
|
|
||||||
|
This is the recommended approach for ES 8+ instead of scroll API.
|
||||||
|
Uses search_after for efficient deep pagination.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
index_name: Name of the index
|
||||||
|
batch_size: Number of documents per batch
|
||||||
|
query: Optional query filter
|
||||||
|
sort_field: Field to sort by (default: _doc for efficiency)
|
||||||
|
|
||||||
|
Yields:
|
||||||
|
Batches of documents
|
||||||
|
"""
|
||||||
|
search_body: dict[str, Any] = {
|
||||||
|
"size": batch_size,
|
||||||
|
"sort": [{sort_field: "asc"}, {"_id": "asc"}],
|
||||||
|
}
|
||||||
|
|
||||||
|
if query:
|
||||||
|
search_body["query"] = query
|
||||||
|
else:
|
||||||
|
search_body["query"] = {"match_all": {}}
|
||||||
|
|
||||||
|
# Initial search
|
||||||
|
response = self.client.search(index=index_name, body=search_body)
|
||||||
|
hits = response["hits"]["hits"]
|
||||||
|
|
||||||
|
while hits:
|
||||||
|
# Extract documents with _id
|
||||||
|
documents = []
|
||||||
|
for hit in hits:
|
||||||
|
doc = hit["_source"].copy()
|
||||||
|
doc["_id"] = hit["_id"]
|
||||||
|
if "_score" in hit:
|
||||||
|
doc["_score"] = hit["_score"]
|
||||||
|
documents.append(doc)
|
||||||
|
|
||||||
|
yield documents
|
||||||
|
|
||||||
|
# Check if there are more results
|
||||||
|
if len(hits) < batch_size:
|
||||||
|
break
|
||||||
|
|
||||||
|
# Get search_after value from last hit
|
||||||
|
search_after = hits[-1]["sort"]
|
||||||
|
search_body["search_after"] = search_after
|
||||||
|
|
||||||
|
response = self.client.search(index=index_name, body=search_body)
|
||||||
|
hits = response["hits"]["hits"]
|
||||||
|
|
||||||
|
def get_document(self, index_name: str, doc_id: str) -> dict[str, Any] | None:
|
||||||
|
"""Get a single document by ID."""
|
||||||
|
try:
|
||||||
|
response = self.client.get(index=index_name, id=doc_id)
|
||||||
|
doc = response["_source"].copy()
|
||||||
|
doc["_id"] = response["_id"]
|
||||||
|
return doc
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_sample_documents(
|
||||||
|
self,
|
||||||
|
index_name: str,
|
||||||
|
size: int = 10,
|
||||||
|
query: dict[str, Any] | None = None,
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Get sample documents from an index.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
index_name: Index name
|
||||||
|
size: Number of samples
|
||||||
|
query: Optional query filter
|
||||||
|
"""
|
||||||
|
search_body = {
|
||||||
|
"query": query if query else {"match_all": {}},
|
||||||
|
"size": size
|
||||||
|
}
|
||||||
|
|
||||||
|
response = self.client.search(index=index_name, body=search_body)
|
||||||
|
documents = []
|
||||||
|
for hit in response["hits"]["hits"]:
|
||||||
|
doc = hit["_source"].copy()
|
||||||
|
doc["_id"] = hit["_id"]
|
||||||
|
documents.append(doc)
|
||||||
|
return documents
|
||||||
|
|
||||||
|
def get_document_ids(
|
||||||
|
self,
|
||||||
|
index_name: str,
|
||||||
|
size: int = 1000,
|
||||||
|
query: dict[str, Any] | None = None,
|
||||||
|
) -> list[str]:
|
||||||
|
"""Get list of document IDs."""
|
||||||
|
search_body = {
|
||||||
|
"query": query if query else {"match_all": {}},
|
||||||
|
"size": size,
|
||||||
|
"_source": False,
|
||||||
|
}
|
||||||
|
|
||||||
|
response = self.client.search(index=index_name, body=search_body)
|
||||||
|
return [hit["_id"] for hit in response["hits"]["hits"]]
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""Close the ES client connection."""
|
||||||
|
self.client.close()
|
||||||
|
logger.info("Elasticsearch connection closed")
|
||||||
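Reviewer note: a minimal usage sketch of `ESClient` as defined above, not part of the PR. It assumes the package imports as `es_ob_migration`, that a local ES instance is reachable on the default port, and that at least one `ragflow_*` index exists; all of these are placeholders.

```python
from es_ob_migration.es_client import ESClient

es = ESClient(host="localhost", port=9200)
for index in es.list_ragflow_indices():
    total = es.count_documents(index)
    print(f"{index}: {total} documents")
    # search_after pagination yields plain dict batches with "_id" attached
    for batch in es.scroll_documents(index, batch_size=500):
        print(f"  first _id in batch: {batch[0]['_id']}")
        break  # only peek at the first batch in this sketch
es.close()
```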
tools/es-to-oceanbase-migration/src/es_ob_migration/migrator.py (new file, 370 lines)
@@ -0,0 +1,370 @@
"""
|
||||||
|
RAGFlow-specific migration orchestrator from Elasticsearch to OceanBase.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from typing import Any, Callable
|
||||||
|
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.progress import (
|
||||||
|
Progress,
|
||||||
|
SpinnerColumn,
|
||||||
|
TextColumn,
|
||||||
|
BarColumn,
|
||||||
|
TaskProgressColumn,
|
||||||
|
TimeRemainingColumn,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .es_client import ESClient
|
||||||
|
from .ob_client import OBClient
|
||||||
|
from .schema import RAGFlowSchemaConverter, RAGFlowDataConverter, VECTOR_FIELD_PATTERN
|
||||||
|
from .progress import ProgressManager, MigrationProgress
|
||||||
|
from .verify import MigrationVerifier
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
console = Console()
|
||||||
|
|
||||||
|
|
||||||
|
class ESToOceanBaseMigrator:
|
||||||
|
"""
|
||||||
|
RAGFlow-specific migration orchestrator.
|
||||||
|
|
||||||
|
This migrator is designed specifically for RAGFlow's data structure,
|
||||||
|
handling the fixed schema and vector embeddings correctly.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
es_client: ESClient,
|
||||||
|
ob_client: OBClient,
|
||||||
|
progress_dir: str = ".migration_progress",
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize migrator.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_client: Elasticsearch client
|
||||||
|
ob_client: OceanBase client
|
||||||
|
progress_dir: Directory for progress files
|
||||||
|
"""
|
||||||
|
self.es_client = es_client
|
||||||
|
self.ob_client = ob_client
|
||||||
|
self.progress_manager = ProgressManager(progress_dir)
|
||||||
|
self.schema_converter = RAGFlowSchemaConverter()
|
||||||
|
|
||||||
|
def migrate(
|
||||||
|
self,
|
||||||
|
es_index: str,
|
||||||
|
ob_table: str,
|
||||||
|
batch_size: int = 1000,
|
||||||
|
resume: bool = False,
|
||||||
|
verify_after: bool = True,
|
||||||
|
on_progress: Callable[[int, int], None] | None = None,
|
||||||
|
) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Execute full migration from ES to OceanBase for RAGFlow data.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_index: Source Elasticsearch index
|
||||||
|
ob_table: Target OceanBase table
|
||||||
|
batch_size: Documents per batch
|
||||||
|
resume: Resume from previous progress
|
||||||
|
verify_after: Run verification after migration
|
||||||
|
on_progress: Progress callback (migrated, total)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Migration result dictionary
|
||||||
|
"""
|
||||||
|
start_time = time.time()
|
||||||
|
result = {
|
||||||
|
"success": False,
|
||||||
|
"es_index": es_index,
|
||||||
|
"ob_table": ob_table,
|
||||||
|
"total_documents": 0,
|
||||||
|
"migrated_documents": 0,
|
||||||
|
"failed_documents": 0,
|
||||||
|
"duration_seconds": 0,
|
||||||
|
"verification": None,
|
||||||
|
"error": None,
|
||||||
|
}
|
||||||
|
|
||||||
|
progress: MigrationProgress | None = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Step 1: Check connections
|
||||||
|
console.print("[bold blue]Step 1: Checking connections...[/]")
|
||||||
|
self._check_connections()
|
||||||
|
|
||||||
|
# Step 2: Analyze ES index
|
||||||
|
console.print("\n[bold blue]Step 2: Analyzing ES index...[/]")
|
||||||
|
analysis = self._analyze_es_index(es_index)
|
||||||
|
|
||||||
|
# Auto-detect vector size from ES mapping
|
||||||
|
vector_size = 768 # Default fallback
|
||||||
|
if analysis["vector_fields"]:
|
||||||
|
vector_size = analysis["vector_fields"][0]["dimension"]
|
||||||
|
console.print(f" [green]Auto-detected vector dimension: {vector_size}[/]")
|
||||||
|
else:
|
||||||
|
console.print(f" [yellow]No vector fields found, using default: {vector_size}[/]")
|
||||||
|
console.print(f" Known RAGFlow fields: {len(analysis['known_fields'])}")
|
||||||
|
if analysis["unknown_fields"]:
|
||||||
|
console.print(f" [yellow]Unknown fields (will be stored in 'extra'): {analysis['unknown_fields']}[/]")
|
||||||
|
|
||||||
|
# Step 3: Get total document count
|
||||||
|
total_docs = self.es_client.count_documents(es_index)
|
||||||
|
console.print(f" Total documents: {total_docs:,}")
|
||||||
|
|
||||||
|
result["total_documents"] = total_docs
|
||||||
|
|
||||||
|
if total_docs == 0:
|
||||||
|
console.print("[yellow]No documents to migrate[/]")
|
||||||
|
result["success"] = True
|
||||||
|
return result
|
||||||
|
|
||||||
|
# Step 4: Handle resume or fresh start
|
||||||
|
if resume and self.progress_manager.can_resume(es_index, ob_table):
|
||||||
|
console.print("\n[bold yellow]Resuming from previous progress...[/]")
|
||||||
|
progress = self.progress_manager.load_progress(es_index, ob_table)
|
||||||
|
console.print(
|
||||||
|
f" Previously migrated: {progress.migrated_documents:,} documents"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Fresh start - check if table already exists
|
||||||
|
if self.ob_client.table_exists(ob_table):
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Table '{ob_table}' already exists in OceanBase. "
|
||||||
|
f"Migration aborted to prevent data conflicts. "
|
||||||
|
f"Please drop the table manually or use a different table name."
|
||||||
|
)
|
||||||
|
|
||||||
|
progress = self.progress_manager.create_progress(
|
||||||
|
es_index, ob_table, total_docs
|
||||||
|
)
|
||||||
|
|
||||||
|
# Step 5: Create table if needed
|
||||||
|
if not progress.table_created:
|
||||||
|
console.print("\n[bold blue]Step 3: Creating OceanBase table...[/]")
|
||||||
|
if not self.ob_client.table_exists(ob_table):
|
||||||
|
self.ob_client.create_ragflow_table(
|
||||||
|
table_name=ob_table,
|
||||||
|
vector_size=vector_size,
|
||||||
|
create_indexes=True,
|
||||||
|
create_fts_indexes=True,
|
||||||
|
)
|
||||||
|
console.print(f" Created table '{ob_table}' with RAGFlow schema")
|
||||||
|
else:
|
||||||
|
console.print(f" Table '{ob_table}' already exists")
|
||||||
|
# Check and add vector column if needed
|
||||||
|
self.ob_client.add_vector_column(ob_table, vector_size)
|
||||||
|
|
||||||
|
progress.table_created = True
|
||||||
|
progress.indexes_created = True
|
||||||
|
progress.schema_converted = True
|
||||||
|
self.progress_manager.save_progress(progress)
|
||||||
|
|
||||||
|
# Step 6: Migrate data
|
||||||
|
console.print("\n[bold blue]Step 4: Migrating data...[/]")
|
||||||
|
data_converter = RAGFlowDataConverter()
|
||||||
|
|
||||||
|
migrated = self._migrate_data(
|
||||||
|
es_index=es_index,
|
||||||
|
ob_table=ob_table,
|
||||||
|
data_converter=data_converter,
|
||||||
|
progress=progress,
|
||||||
|
batch_size=batch_size,
|
||||||
|
on_progress=on_progress,
|
||||||
|
)
|
||||||
|
|
||||||
|
result["migrated_documents"] = migrated
|
||||||
|
result["failed_documents"] = progress.failed_documents
|
||||||
|
|
||||||
|
# Step 7: Mark completed
|
||||||
|
self.progress_manager.mark_completed(progress)
|
||||||
|
|
||||||
|
# Step 8: Verify (optional)
|
||||||
|
if verify_after:
|
||||||
|
console.print("\n[bold blue]Step 5: Verifying migration...[/]")
|
||||||
|
verifier = MigrationVerifier(self.es_client, self.ob_client)
|
||||||
|
verification = verifier.verify(
|
||||||
|
es_index, ob_table,
|
||||||
|
primary_key="id"
|
||||||
|
)
|
||||||
|
result["verification"] = {
|
||||||
|
"passed": verification.passed,
|
||||||
|
"message": verification.message,
|
||||||
|
"es_count": verification.es_count,
|
||||||
|
"ob_count": verification.ob_count,
|
||||||
|
"sample_match_rate": verification.sample_match_rate,
|
||||||
|
}
|
||||||
|
console.print(verifier.generate_report(verification))
|
||||||
|
|
||||||
|
result["success"] = True
|
||||||
|
result["duration_seconds"] = time.time() - start_time
|
||||||
|
|
||||||
|
console.print(
|
||||||
|
f"\n[bold green]Migration completed successfully![/]"
|
||||||
|
f"\n Total: {result['total_documents']:,} documents"
|
||||||
|
f"\n Migrated: {result['migrated_documents']:,} documents"
|
||||||
|
f"\n Failed: {result['failed_documents']:,} documents"
|
||||||
|
f"\n Duration: {result['duration_seconds']:.1f} seconds"
|
||||||
|
)
|
||||||
|
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
console.print("\n[bold yellow]Migration interrupted by user[/]")
|
||||||
|
if progress:
|
||||||
|
self.progress_manager.mark_paused(progress)
|
||||||
|
result["error"] = "Interrupted by user"
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception("Migration failed")
|
||||||
|
if progress:
|
||||||
|
self.progress_manager.mark_failed(progress, str(e))
|
||||||
|
result["error"] = str(e)
|
||||||
|
console.print(f"\n[bold red]Migration failed: {e}[/]")
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _check_connections(self):
|
||||||
|
"""Verify connections to both databases."""
|
||||||
|
# Check ES
|
||||||
|
es_health = self.es_client.health_check()
|
||||||
|
if es_health.get("status") not in ("green", "yellow"):
|
||||||
|
raise RuntimeError(f"ES cluster unhealthy: {es_health}")
|
||||||
|
console.print(f" ES cluster status: {es_health.get('status')}")
|
||||||
|
|
||||||
|
# Check OceanBase
|
||||||
|
if not self.ob_client.health_check():
|
||||||
|
raise RuntimeError("OceanBase connection failed")
|
||||||
|
|
||||||
|
ob_version = self.ob_client.get_version()
|
||||||
|
console.print(f" OceanBase connection: OK (version: {ob_version})")
|
||||||
|
|
||||||
|
def _analyze_es_index(self, es_index: str) -> dict[str, Any]:
|
||||||
|
"""Analyze ES index structure for RAGFlow compatibility."""
|
||||||
|
es_mapping = self.es_client.get_index_mapping(es_index)
|
||||||
|
return self.schema_converter.analyze_es_mapping(es_mapping)
|
||||||
|
|
||||||
|
def _migrate_data(
|
||||||
|
self,
|
||||||
|
es_index: str,
|
||||||
|
ob_table: str,
|
||||||
|
data_converter: RAGFlowDataConverter,
|
||||||
|
progress: MigrationProgress,
|
||||||
|
batch_size: int,
|
||||||
|
on_progress: Callable[[int, int], None] | None,
|
||||||
|
) -> int:
|
||||||
|
"""Migrate data in batches."""
|
||||||
|
total = progress.total_documents
|
||||||
|
migrated = progress.migrated_documents
|
||||||
|
|
||||||
|
with Progress(
|
||||||
|
SpinnerColumn(),
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
BarColumn(),
|
||||||
|
TaskProgressColumn(),
|
||||||
|
TimeRemainingColumn(),
|
||||||
|
console=console,
|
||||||
|
) as pbar:
|
||||||
|
task = pbar.add_task(
|
||||||
|
"Migrating...",
|
||||||
|
total=total,
|
||||||
|
completed=migrated,
|
||||||
|
)
|
||||||
|
|
||||||
|
batch_count = 0
|
||||||
|
for batch in self.es_client.scroll_documents(es_index, batch_size):
|
||||||
|
batch_count += 1
|
||||||
|
|
||||||
|
# Convert batch to OceanBase format
|
||||||
|
ob_rows = data_converter.convert_batch(batch)
|
||||||
|
|
||||||
|
# Insert batch
|
||||||
|
try:
|
||||||
|
inserted = self.ob_client.insert_batch(ob_table, ob_rows)
|
||||||
|
migrated += inserted
|
||||||
|
|
||||||
|
# Update progress
|
||||||
|
last_ids = [doc.get("_id", doc.get("id", "")) for doc in batch]
|
||||||
|
self.progress_manager.update_progress(
|
||||||
|
progress,
|
||||||
|
migrated_count=inserted,
|
||||||
|
last_batch_ids=last_ids,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Update progress bar
|
||||||
|
pbar.update(task, completed=migrated)
|
||||||
|
|
||||||
|
# Callback
|
||||||
|
if on_progress:
|
||||||
|
on_progress(migrated, total)
|
||||||
|
|
||||||
|
# Log periodically
|
||||||
|
if batch_count % 10 == 0:
|
||||||
|
logger.info(f"Migrated {migrated:,}/{total:,} documents")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Batch insert failed: {e}")
|
||||||
|
progress.failed_documents += len(batch)
|
||||||
|
# Continue with next batch
|
||||||
|
|
||||||
|
return migrated
|
||||||
|
|
||||||
|
def get_schema_preview(self, es_index: str) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Get a preview of schema analysis without executing migration.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_index: Elasticsearch index name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Schema analysis information
|
||||||
|
"""
|
||||||
|
es_mapping = self.es_client.get_index_mapping(es_index)
|
||||||
|
analysis = self.schema_converter.analyze_es_mapping(es_mapping)
|
||||||
|
column_defs = self.schema_converter.get_column_definitions()
|
||||||
|
|
||||||
|
return {
|
||||||
|
"es_index": es_index,
|
||||||
|
"es_mapping": es_mapping,
|
||||||
|
"analysis": analysis,
|
||||||
|
"ob_columns": column_defs,
|
||||||
|
"vector_fields": self.schema_converter.get_vector_fields(),
|
||||||
|
"total_columns": len(column_defs),
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_data_preview(
|
||||||
|
self,
|
||||||
|
es_index: str,
|
||||||
|
sample_size: int = 5,
|
||||||
|
kb_id: str | None = None,
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Get sample documents from ES for preview.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_index: ES index name
|
||||||
|
sample_size: Number of samples
|
||||||
|
kb_id: Optional KB filter
|
||||||
|
"""
|
||||||
|
query = None
|
||||||
|
if kb_id:
|
||||||
|
query = {"term": {"kb_id": kb_id}}
|
||||||
|
return self.es_client.get_sample_documents(es_index, sample_size, query=query)
|
||||||
|
|
||||||
|
def list_knowledge_bases(self, es_index: str) -> list[str]:
|
||||||
|
"""
|
||||||
|
List all knowledge base IDs in an ES index.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_index: ES index name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of kb_id values
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
agg_result = self.es_client.aggregate_field(es_index, "kb_id")
|
||||||
|
return [bucket["key"] for bucket in agg_result.get("buckets", [])]
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to list knowledge bases: {e}")
|
||||||
|
return []
|
||||||
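Reviewer note: a minimal end-to-end sketch of the orchestrator above, not part of the PR. The host, port, user/tenant, database, and index/table names are placeholders that must match the actual deployment.

```python
from es_ob_migration.es_client import ESClient
from es_ob_migration.ob_client import OBClient
from es_ob_migration.migrator import ESToOceanBaseMigrator

es = ESClient(host="localhost", port=9200)
ob = OBClient(host="localhost", port=2881, user="root@test", database="test")

migrator = ESToOceanBaseMigrator(es, ob)
result = migrator.migrate(
    es_index="ragflow_xxx",   # placeholder source index
    ob_table="ragflow_xxx",   # created with the auto-detected vector dimension
    batch_size=1000,
    resume=True,              # picks up a paused/failed run if a progress file exists
    verify_after=True,
)
print(result["migrated_documents"], "of", result["total_documents"], "migrated")
```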
tools/es-to-oceanbase-migration/src/es_ob_migration/ob_client.py (new file, 442 lines)
@@ -0,0 +1,442 @@
"""
|
||||||
|
OceanBase Client for RAGFlow data migration.
|
||||||
|
|
||||||
|
This client is specifically designed for RAGFlow's data structure.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from pyobvector import ObVecClient, FtsIndexParam, FtsParser, VECTOR, ARRAY
|
||||||
|
from sqlalchemy import Column, String, Integer, Float, JSON, Text, text, Double
|
||||||
|
from sqlalchemy.dialects.mysql import LONGTEXT, TEXT as MYSQL_TEXT
|
||||||
|
|
||||||
|
from .schema import RAGFLOW_COLUMNS, ARRAY_COLUMNS, FTS_COLUMNS_TKS
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
# Index naming templates (from RAGFlow ob_conn.py)
|
||||||
|
INDEX_NAME_TEMPLATE = "ix_%s_%s"
|
||||||
|
FULLTEXT_INDEX_NAME_TEMPLATE = "fts_idx_%s"
|
||||||
|
VECTOR_INDEX_NAME_TEMPLATE = "%s_idx"
|
||||||
|
|
||||||
|
# Columns that need regular indexes
|
||||||
|
INDEX_COLUMNS = [
|
||||||
|
"kb_id",
|
||||||
|
"doc_id",
|
||||||
|
"available_int",
|
||||||
|
"knowledge_graph_kwd",
|
||||||
|
"entity_type_kwd",
|
||||||
|
"removed_kwd",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class OBClient:
|
||||||
|
"""OceanBase client wrapper for RAGFlow migration operations."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
host: str = "localhost",
|
||||||
|
port: int = 2881,
|
||||||
|
user: str = "root",
|
||||||
|
password: str = "",
|
||||||
|
database: str = "test",
|
||||||
|
pool_size: int = 10,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize OceanBase client.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
host: OceanBase host address
|
||||||
|
port: OceanBase port
|
||||||
|
user: Database user (format: user@tenant for OceanBase)
|
||||||
|
password: Database password
|
||||||
|
database: Database name
|
||||||
|
pool_size: Connection pool size
|
||||||
|
"""
|
||||||
|
self.host = host
|
||||||
|
self.port = port
|
||||||
|
self.user = user
|
||||||
|
self.password = password
|
||||||
|
self.database = database
|
||||||
|
|
||||||
|
# Initialize pyobvector client
|
||||||
|
self.uri = f"{host}:{port}"
|
||||||
|
self.client = ObVecClient(
|
||||||
|
uri=self.uri,
|
||||||
|
user=user,
|
||||||
|
password=password,
|
||||||
|
db_name=database,
|
||||||
|
pool_pre_ping=True,
|
||||||
|
pool_recycle=3600,
|
||||||
|
pool_size=pool_size,
|
||||||
|
)
|
||||||
|
logger.info(f"Connected to OceanBase at {self.uri}, database: {database}")
|
||||||
|
|
||||||
|
def health_check(self) -> bool:
|
||||||
|
"""Check database connectivity."""
|
||||||
|
try:
|
||||||
|
result = self.client.perform_raw_text_sql("SELECT 1 FROM DUAL")
|
||||||
|
result.fetchone()
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"OceanBase health check failed: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_version(self) -> str | None:
|
||||||
|
"""Get OceanBase version."""
|
||||||
|
try:
|
||||||
|
result = self.client.perform_raw_text_sql("SELECT OB_VERSION() FROM DUAL")
|
||||||
|
row = result.fetchone()
|
||||||
|
return row[0] if row else None
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to get OceanBase version: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def table_exists(self, table_name: str) -> bool:
|
||||||
|
"""Check if a table exists."""
|
||||||
|
try:
|
||||||
|
return self.client.check_table_exists(table_name)
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def create_ragflow_table(
|
||||||
|
self,
|
||||||
|
table_name: str,
|
||||||
|
vector_size: int = 768,
|
||||||
|
create_indexes: bool = True,
|
||||||
|
create_fts_indexes: bool = True,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Create a RAGFlow-compatible table in OceanBase.
|
||||||
|
|
||||||
|
This creates a table with the exact schema that RAGFlow expects,
|
||||||
|
including all columns, indexes, and vector columns.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
table_name: Name of the table (usually the ES index name)
|
||||||
|
vector_size: Vector dimension (e.g., 768, 1024, 1536)
|
||||||
|
create_indexes: Whether to create regular indexes
|
||||||
|
create_fts_indexes: Whether to create fulltext indexes
|
||||||
|
"""
|
||||||
|
# Build column definitions
|
||||||
|
columns = self._build_ragflow_columns()
|
||||||
|
|
||||||
|
# Add vector column
|
||||||
|
vector_column_name = f"q_{vector_size}_vec"
|
||||||
|
columns.append(
|
||||||
|
Column(vector_column_name, VECTOR(vector_size), nullable=True,
|
||||||
|
comment=f"vector embedding ({vector_size} dimensions)")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Table options (from RAGFlow)
|
||||||
|
table_options = {
|
||||||
|
"mysql_charset": "utf8mb4",
|
||||||
|
"mysql_collate": "utf8mb4_unicode_ci",
|
||||||
|
"mysql_organization": "heap",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Create table
|
||||||
|
self.client.create_table(
|
||||||
|
table_name=table_name,
|
||||||
|
columns=columns,
|
||||||
|
**table_options,
|
||||||
|
)
|
||||||
|
logger.info(f"Created table: {table_name}")
|
||||||
|
|
||||||
|
# Create regular indexes
|
||||||
|
if create_indexes:
|
||||||
|
self._create_regular_indexes(table_name)
|
||||||
|
|
||||||
|
# Create fulltext indexes
|
||||||
|
if create_fts_indexes:
|
||||||
|
self._create_fulltext_indexes(table_name)
|
||||||
|
|
||||||
|
# Create vector index
|
||||||
|
self._create_vector_index(table_name, vector_column_name)
|
||||||
|
|
||||||
|
# Refresh metadata
|
||||||
|
self.client.refresh_metadata([table_name])
|
||||||
|
|
||||||
|
def _build_ragflow_columns(self) -> list[Column]:
|
||||||
|
"""Build SQLAlchemy Column objects for RAGFlow schema."""
|
||||||
|
columns = []
|
||||||
|
|
||||||
|
for col_name, col_def in RAGFLOW_COLUMNS.items():
|
||||||
|
ob_type = col_def["ob_type"]
|
||||||
|
nullable = col_def.get("nullable", True)
|
||||||
|
default = col_def.get("default")
|
||||||
|
is_primary = col_def.get("is_primary", False)
|
||||||
|
is_array = col_def.get("is_array", False)
|
||||||
|
|
||||||
|
# Parse type and create appropriate Column
|
||||||
|
col = self._create_column(col_name, ob_type, nullable, default, is_primary, is_array)
|
||||||
|
columns.append(col)
|
||||||
|
|
||||||
|
return columns
|
||||||
|
|
||||||
|
def _create_column(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
ob_type: str,
|
||||||
|
nullable: bool,
|
||||||
|
default: Any,
|
||||||
|
is_primary: bool,
|
||||||
|
is_array: bool,
|
||||||
|
) -> Column:
|
||||||
|
"""Create a SQLAlchemy Column object based on type string."""
|
||||||
|
|
||||||
|
# Handle array types
|
||||||
|
if is_array or ob_type.startswith("ARRAY"):
|
||||||
|
# Extract inner type
|
||||||
|
if "String" in ob_type:
|
||||||
|
inner_type = String(256)
|
||||||
|
elif "Integer" in ob_type:
|
||||||
|
inner_type = Integer
|
||||||
|
else:
|
||||||
|
inner_type = String(256)
|
||||||
|
|
||||||
|
# Nested array (e.g., ARRAY(ARRAY(Integer)))
|
||||||
|
if ob_type.count("ARRAY") > 1:
|
||||||
|
return Column(name, ARRAY(ARRAY(inner_type)), nullable=nullable)
|
||||||
|
else:
|
||||||
|
return Column(name, ARRAY(inner_type), nullable=nullable)
|
||||||
|
|
||||||
|
# Handle String types with length
|
||||||
|
if ob_type.startswith("String"):
|
||||||
|
# Extract length: String(256) -> 256
|
||||||
|
import re
|
||||||
|
match = re.search(r'\((\d+)\)', ob_type)
|
||||||
|
length = int(match.group(1)) if match else 256
|
||||||
|
return Column(
|
||||||
|
name, String(length),
|
||||||
|
primary_key=is_primary,
|
||||||
|
nullable=nullable,
|
||||||
|
server_default=f"'{default}'" if default else None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Map other types
|
||||||
|
type_map = {
|
||||||
|
"Integer": Integer,
|
||||||
|
"Double": Double,
|
||||||
|
"Float": Float,
|
||||||
|
"JSON": JSON,
|
||||||
|
"LONGTEXT": LONGTEXT,
|
||||||
|
"TEXT": MYSQL_TEXT,
|
||||||
|
}
|
||||||
|
|
||||||
|
for type_name, type_class in type_map.items():
|
||||||
|
if type_name in ob_type:
|
||||||
|
return Column(
|
||||||
|
name, type_class,
|
||||||
|
primary_key=is_primary,
|
||||||
|
nullable=nullable,
|
||||||
|
server_default=str(default) if default is not None else None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Default to String
|
||||||
|
return Column(name, String(256), nullable=nullable)
|
||||||
|
|
||||||
|
def _create_regular_indexes(self, table_name: str):
|
||||||
|
"""Create regular indexes for indexed columns."""
|
||||||
|
for col_name in INDEX_COLUMNS:
|
||||||
|
index_name = INDEX_NAME_TEMPLATE % (table_name, col_name)
|
||||||
|
try:
|
||||||
|
self.client.create_index(
|
||||||
|
table_name=table_name,
|
||||||
|
is_vec_index=False,
|
||||||
|
index_name=index_name,
|
||||||
|
column_names=[col_name],
|
||||||
|
)
|
||||||
|
logger.debug(f"Created index: {index_name}")
|
||||||
|
except Exception as e:
|
||||||
|
if "Duplicate" in str(e):
|
||||||
|
logger.debug(f"Index {index_name} already exists")
|
||||||
|
else:
|
||||||
|
logger.warning(f"Failed to create index {index_name}: {e}")
|
||||||
|
|
||||||
|
def _create_fulltext_indexes(self, table_name: str):
|
||||||
|
"""Create fulltext indexes for text columns."""
|
||||||
|
for fts_column in FTS_COLUMNS_TKS:
|
||||||
|
col_name = fts_column.split("^")[0] # Remove weight suffix
|
||||||
|
index_name = FULLTEXT_INDEX_NAME_TEMPLATE % col_name
|
||||||
|
try:
|
||||||
|
self.client.create_fts_idx_with_fts_index_param(
|
||||||
|
table_name=table_name,
|
||||||
|
fts_idx_param=FtsIndexParam(
|
||||||
|
index_name=index_name,
|
||||||
|
field_names=[col_name],
|
||||||
|
parser_type=FtsParser.IK,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
logger.debug(f"Created fulltext index: {index_name}")
|
||||||
|
except Exception as e:
|
||||||
|
if "Duplicate" in str(e):
|
||||||
|
logger.debug(f"Fulltext index {index_name} already exists")
|
||||||
|
else:
|
||||||
|
logger.warning(f"Failed to create fulltext index {index_name}: {e}")
|
||||||
|
|
||||||
|
def _create_vector_index(self, table_name: str, vector_column_name: str):
|
||||||
|
"""Create vector index for embedding column."""
|
||||||
|
index_name = VECTOR_INDEX_NAME_TEMPLATE % vector_column_name
|
||||||
|
try:
|
||||||
|
self.client.create_index(
|
||||||
|
table_name=table_name,
|
||||||
|
is_vec_index=True,
|
||||||
|
index_name=index_name,
|
||||||
|
column_names=[vector_column_name],
|
||||||
|
vidx_params="distance=cosine, type=hnsw, lib=vsag",
|
||||||
|
)
|
||||||
|
logger.info(f"Created vector index: {index_name}")
|
||||||
|
except Exception as e:
|
||||||
|
if "Duplicate" in str(e):
|
||||||
|
logger.debug(f"Vector index {index_name} already exists")
|
||||||
|
else:
|
||||||
|
logger.warning(f"Failed to create vector index {index_name}: {e}")
|
||||||
|
|
||||||
|
def add_vector_column(self, table_name: str, vector_size: int):
|
||||||
|
"""Add a vector column to an existing table."""
|
||||||
|
vector_column_name = f"q_{vector_size}_vec"
|
||||||
|
|
||||||
|
# Check if column exists
|
||||||
|
if self._column_exists(table_name, vector_column_name):
|
||||||
|
logger.info(f"Vector column {vector_column_name} already exists")
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.client.add_columns(
|
||||||
|
table_name=table_name,
|
||||||
|
columns=[Column(vector_column_name, VECTOR(vector_size), nullable=True)],
|
||||||
|
)
|
||||||
|
logger.info(f"Added vector column: {vector_column_name}")
|
||||||
|
|
||||||
|
# Create index
|
||||||
|
self._create_vector_index(table_name, vector_column_name)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to add vector column: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def _column_exists(self, table_name: str, column_name: str) -> bool:
|
||||||
|
"""Check if a column exists in a table."""
|
||||||
|
try:
|
||||||
|
result = self.client.perform_raw_text_sql(
|
||||||
|
f"SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS "
|
||||||
|
f"WHERE TABLE_SCHEMA = '{self.database}' "
|
||||||
|
f"AND TABLE_NAME = '{table_name}' "
|
||||||
|
f"AND COLUMN_NAME = '{column_name}'"
|
||||||
|
)
|
||||||
|
count = result.fetchone()[0]
|
||||||
|
return count > 0
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _index_exists(self, table_name: str, index_name: str) -> bool:
|
||||||
|
"""Check if an index exists."""
|
||||||
|
try:
|
||||||
|
result = self.client.perform_raw_text_sql(
|
||||||
|
f"SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS "
|
||||||
|
f"WHERE TABLE_SCHEMA = '{self.database}' "
|
||||||
|
f"AND TABLE_NAME = '{table_name}' "
|
||||||
|
f"AND INDEX_NAME = '{index_name}'"
|
||||||
|
)
|
||||||
|
count = result.fetchone()[0]
|
||||||
|
return count > 0
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def insert_batch(
|
||||||
|
self,
|
||||||
|
table_name: str,
|
||||||
|
documents: list[dict[str, Any]],
|
||||||
|
) -> int:
|
||||||
|
"""
|
||||||
|
Insert a batch of documents using upsert.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
table_name: Name of the table
|
||||||
|
documents: List of documents to insert
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Number of documents inserted
|
||||||
|
"""
|
||||||
|
if not documents:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.client.upsert(table_name=table_name, data=documents)
|
||||||
|
return len(documents)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Batch insert failed: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def count_rows(self, table_name: str, kb_id: str | None = None) -> int:
|
||||||
|
"""
|
||||||
|
Count rows in a table.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
table_name: Table name
|
||||||
|
kb_id: Optional knowledge base ID filter
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
sql = f"SELECT COUNT(*) FROM `{table_name}`"
|
||||||
|
if kb_id:
|
||||||
|
sql += f" WHERE kb_id = '{kb_id}'"
|
||||||
|
result = self.client.perform_raw_text_sql(sql)
|
||||||
|
return result.fetchone()[0]
|
||||||
|
except Exception:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def get_sample_rows(
|
||||||
|
self,
|
||||||
|
table_name: str,
|
||||||
|
limit: int = 10,
|
||||||
|
kb_id: str | None = None,
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Get sample rows from a table."""
|
||||||
|
try:
|
||||||
|
sql = f"SELECT * FROM `{table_name}`"
|
||||||
|
if kb_id:
|
||||||
|
sql += f" WHERE kb_id = '{kb_id}'"
|
||||||
|
sql += f" LIMIT {limit}"
|
||||||
|
|
||||||
|
result = self.client.perform_raw_text_sql(sql)
|
||||||
|
columns = result.keys()
|
||||||
|
rows = []
|
||||||
|
for row in result:
|
||||||
|
rows.append(dict(zip(columns, row)))
|
||||||
|
return rows
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get sample rows: {e}")
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_row_by_id(self, table_name: str, doc_id: str) -> dict[str, Any] | None:
|
||||||
|
"""Get a single row by ID."""
|
||||||
|
try:
|
||||||
|
result = self.client.get(table_name=table_name, ids=[doc_id])
|
||||||
|
row = result.fetchone()
|
||||||
|
if row:
|
||||||
|
columns = result.keys()
|
||||||
|
return dict(zip(columns, row))
|
||||||
|
return None
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get row: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def drop_table(self, table_name: str):
|
||||||
|
"""Drop a table if exists."""
|
||||||
|
try:
|
||||||
|
self.client.drop_table_if_exist(table_name)
|
||||||
|
logger.info(f"Dropped table: {table_name}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to drop table: {e}")
|
||||||
|
|
||||||
|
def execute_sql(self, sql: str) -> Any:
|
||||||
|
"""Execute raw SQL."""
|
||||||
|
return self.client.perform_raw_text_sql(sql)
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""Close the OB client connection."""
|
||||||
|
self.client.engine.dispose()
|
||||||
|
logger.info("OceanBase connection closed")
|
||||||
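Reviewer note: a minimal sketch of the `OBClient` surface the migrator relies on, not part of the PR. Credentials, the table name, and the 768-dimension zero vector are placeholders.

```python
from es_ob_migration.ob_client import OBClient

ob = OBClient(host="localhost", port=2881, user="root@test", database="test")
if not ob.table_exists("ragflow_demo"):
    ob.create_ragflow_table("ragflow_demo", vector_size=768)

row = {
    "id": "chunk-0001",
    "kb_id": "kb-demo",
    "content_with_weight": "hello world",
    "available_int": 1,
    "q_768_vec": [0.0] * 768,  # embedding placeholder
}
ob.insert_batch("ragflow_demo", [row])  # upsert semantics, so reruns are safe
print(ob.count_rows("ragflow_demo", kb_id="kb-demo"))
ob.close()
```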
tools/es-to-oceanbase-migration/src/es_ob_migration/progress.py (new file, 221 lines)
@@ -0,0 +1,221 @@
"""
|
||||||
|
Progress tracking and resume capability for migration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass, field, asdict
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class MigrationProgress:
|
||||||
|
"""Migration progress state."""
|
||||||
|
|
||||||
|
# Basic info
|
||||||
|
es_index: str
|
||||||
|
ob_table: str
|
||||||
|
started_at: str = ""
|
||||||
|
updated_at: str = ""
|
||||||
|
|
||||||
|
# Progress counters
|
||||||
|
total_documents: int = 0
|
||||||
|
migrated_documents: int = 0
|
||||||
|
failed_documents: int = 0
|
||||||
|
|
||||||
|
# State for resume
|
||||||
|
last_sort_values: list[Any] = field(default_factory=list)
|
||||||
|
last_batch_ids: list[str] = field(default_factory=list)
|
||||||
|
|
||||||
|
# Status
|
||||||
|
status: str = "pending" # pending, running, completed, failed, paused
|
||||||
|
error_message: str = ""
|
||||||
|
|
||||||
|
# Schema info
|
||||||
|
schema_converted: bool = False
|
||||||
|
table_created: bool = False
|
||||||
|
indexes_created: bool = False
|
||||||
|
|
||||||
|
def __post_init__(self):
|
||||||
|
if not self.started_at:
|
||||||
|
self.started_at = datetime.utcnow().isoformat()
|
||||||
|
self.updated_at = datetime.utcnow().isoformat()
|
||||||
|
|
||||||
|
|
||||||
|
class ProgressManager:
|
||||||
|
"""Manage migration progress persistence."""
|
||||||
|
|
||||||
|
def __init__(self, progress_dir: str = ".migration_progress"):
|
||||||
|
"""
|
||||||
|
Initialize progress manager.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
progress_dir: Directory to store progress files
|
||||||
|
"""
|
||||||
|
self.progress_dir = Path(progress_dir)
|
||||||
|
self.progress_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
def _get_progress_file(self, es_index: str, ob_table: str) -> Path:
|
||||||
|
"""Get progress file path for a migration."""
|
||||||
|
filename = f"{es_index}_to_{ob_table}.json"
|
||||||
|
return self.progress_dir / filename
|
||||||
|
|
||||||
|
def load_progress(
|
||||||
|
self, es_index: str, ob_table: str
|
||||||
|
) -> MigrationProgress | None:
|
||||||
|
"""
|
||||||
|
Load progress from file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_index: Elasticsearch index name
|
||||||
|
ob_table: OceanBase table name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
MigrationProgress if exists, None otherwise
|
||||||
|
"""
|
||||||
|
progress_file = self._get_progress_file(es_index, ob_table)
|
||||||
|
|
||||||
|
if not progress_file.exists():
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(progress_file, "r") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
progress = MigrationProgress(**data)
|
||||||
|
logger.info(
|
||||||
|
f"Loaded progress: {progress.migrated_documents}/{progress.total_documents} documents"
|
||||||
|
)
|
||||||
|
return progress
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to load progress: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def save_progress(self, progress: MigrationProgress):
|
||||||
|
"""
|
||||||
|
Save progress to file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
progress: MigrationProgress instance
|
||||||
|
"""
|
||||||
|
progress.updated_at = datetime.utcnow().isoformat()
|
||||||
|
progress_file = self._get_progress_file(progress.es_index, progress.ob_table)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(progress_file, "w") as f:
|
||||||
|
json.dump(asdict(progress), f, indent=2, default=str)
|
||||||
|
logger.debug(f"Saved progress to {progress_file}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to save progress: {e}")
|
||||||
|
|
||||||
|
def delete_progress(self, es_index: str, ob_table: str):
|
||||||
|
"""Delete progress file."""
|
||||||
|
progress_file = self._get_progress_file(es_index, ob_table)
|
||||||
|
if progress_file.exists():
|
||||||
|
progress_file.unlink()
|
||||||
|
logger.info(f"Deleted progress file: {progress_file}")
|
||||||
|
|
||||||
|
def create_progress(
|
||||||
|
self,
|
||||||
|
es_index: str,
|
||||||
|
ob_table: str,
|
||||||
|
total_documents: int,
|
||||||
|
) -> MigrationProgress:
|
||||||
|
"""
|
||||||
|
Create new progress tracker.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_index: Elasticsearch index name
|
||||||
|
ob_table: OceanBase table name
|
||||||
|
total_documents: Total documents to migrate
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
New MigrationProgress instance
|
||||||
|
"""
|
||||||
|
progress = MigrationProgress(
|
||||||
|
es_index=es_index,
|
||||||
|
ob_table=ob_table,
|
||||||
|
total_documents=total_documents,
|
||||||
|
status="running",
|
||||||
|
)
|
||||||
|
self.save_progress(progress)
|
||||||
|
return progress
|
||||||
|
|
||||||
|
def update_progress(
|
||||||
|
self,
|
||||||
|
progress: MigrationProgress,
|
||||||
|
migrated_count: int,
|
||||||
|
last_sort_values: list[Any] | None = None,
|
||||||
|
last_batch_ids: list[str] | None = None,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Update progress after a batch.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
progress: MigrationProgress instance
|
||||||
|
migrated_count: Number of documents migrated in this batch
|
||||||
|
last_sort_values: Sort values for search_after
|
||||||
|
last_batch_ids: IDs of documents in last batch
|
||||||
|
"""
|
||||||
|
progress.migrated_documents += migrated_count
|
||||||
|
|
||||||
|
if last_sort_values:
|
||||||
|
progress.last_sort_values = last_sort_values
|
||||||
|
if last_batch_ids:
|
||||||
|
progress.last_batch_ids = last_batch_ids
|
||||||
|
|
||||||
|
self.save_progress(progress)
|
||||||
|
|
||||||
|
def mark_completed(self, progress: MigrationProgress):
|
||||||
|
"""Mark migration as completed."""
|
||||||
|
progress.status = "completed"
|
||||||
|
progress.updated_at = datetime.utcnow().isoformat()
|
||||||
|
self.save_progress(progress)
|
||||||
|
logger.info(
|
||||||
|
f"Migration completed: {progress.migrated_documents} documents"
|
||||||
|
)
|
||||||
|
|
||||||
|
def mark_failed(self, progress: MigrationProgress, error: str):
|
||||||
|
"""Mark migration as failed."""
|
||||||
|
progress.status = "failed"
|
||||||
|
progress.error_message = error
|
||||||
|
progress.updated_at = datetime.utcnow().isoformat()
|
||||||
|
self.save_progress(progress)
|
||||||
|
logger.error(f"Migration failed: {error}")
|
||||||
|
|
||||||
|
def mark_paused(self, progress: MigrationProgress):
|
||||||
|
"""Mark migration as paused (for resume later)."""
|
||||||
|
progress.status = "paused"
|
||||||
|
progress.updated_at = datetime.utcnow().isoformat()
|
||||||
|
self.save_progress(progress)
|
||||||
|
logger.info(
|
||||||
|
f"Migration paused at {progress.migrated_documents}/{progress.total_documents}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def can_resume(self, es_index: str, ob_table: str) -> bool:
|
||||||
|
"""Check if migration can be resumed."""
|
||||||
|
progress = self.load_progress(es_index, ob_table)
|
||||||
|
if not progress:
|
||||||
|
return False
|
||||||
|
return progress.status in ("running", "paused", "failed")
|
||||||
|
|
||||||
|
def get_resume_info(self, es_index: str, ob_table: str) -> dict[str, Any] | None:
|
||||||
|
"""Get information needed to resume migration."""
|
||||||
|
progress = self.load_progress(es_index, ob_table)
|
||||||
|
if not progress:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return {
|
||||||
|
"migrated_documents": progress.migrated_documents,
|
||||||
|
"total_documents": progress.total_documents,
|
||||||
|
"last_sort_values": progress.last_sort_values,
|
||||||
|
"last_batch_ids": progress.last_batch_ids,
|
||||||
|
"schema_converted": progress.schema_converted,
|
||||||
|
"table_created": progress.table_created,
|
||||||
|
"indexes_created": progress.indexes_created,
|
||||||
|
"status": progress.status,
|
||||||
|
}
|
||||||
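Reviewer note: a short sketch of the progress lifecycle the migrator relies on, not part of the PR. The index and table names are placeholders; the JSON file lands under `.migration_progress/<index>_to_<table>.json`.

```python
from es_ob_migration.progress import ProgressManager

pm = ProgressManager(".migration_progress")
progress = pm.create_progress("ragflow_xxx", "ragflow_xxx", total_documents=10_000)
pm.update_progress(progress, migrated_count=1_000, last_batch_ids=["doc-999"])
pm.mark_paused(progress)

# Later run: resumable states are "running", "paused" and "failed"
if pm.can_resume("ragflow_xxx", "ragflow_xxx"):
    info = pm.get_resume_info("ragflow_xxx", "ragflow_xxx")
    print(f"resume from {info['migrated_documents']}/{info['total_documents']}")
```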
tools/es-to-oceanbase-migration/src/es_ob_migration/schema.py (new file, 451 lines)
@@ -0,0 +1,451 @@
"""
|
||||||
|
RAGFlow-specific schema conversion from Elasticsearch to OceanBase.
|
||||||
|
|
||||||
|
This module handles the fixed RAGFlow table structure migration.
|
||||||
|
RAGFlow uses a predefined schema for both ES and OceanBase.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
# RAGFlow fixed column definitions (from rag/utils/ob_conn.py)
|
||||||
|
# These are the actual columns used by RAGFlow
|
||||||
|
RAGFLOW_COLUMNS = {
|
||||||
|
# Primary identifiers
|
||||||
|
"id": {"ob_type": "String(256)", "nullable": False, "is_primary": True},
|
||||||
|
"kb_id": {"ob_type": "String(256)", "nullable": False, "index": True},
|
||||||
|
"doc_id": {"ob_type": "String(256)", "nullable": True, "index": True},
|
||||||
|
|
||||||
|
# Document metadata
|
||||||
|
"docnm_kwd": {"ob_type": "String(256)", "nullable": True}, # document name
|
||||||
|
"doc_type_kwd": {"ob_type": "String(256)", "nullable": True}, # document type
|
||||||
|
|
||||||
|
# Title fields
|
||||||
|
"title_tks": {"ob_type": "String(256)", "nullable": True}, # title tokens
|
||||||
|
"title_sm_tks": {"ob_type": "String(256)", "nullable": True}, # fine-grained title tokens
|
||||||
|
|
||||||
|
# Content fields
|
||||||
|
"content_with_weight": {"ob_type": "LONGTEXT", "nullable": True}, # original content
|
||||||
|
"content_ltks": {"ob_type": "LONGTEXT", "nullable": True}, # long text tokens
|
||||||
|
"content_sm_ltks": {"ob_type": "LONGTEXT", "nullable": True}, # fine-grained tokens
|
||||||
|
|
||||||
|
# Feature fields
|
||||||
|
"pagerank_fea": {"ob_type": "Integer", "nullable": True}, # page rank priority
|
||||||
|
|
||||||
|
# Array fields
|
||||||
|
"important_kwd": {"ob_type": "ARRAY(String(256))", "nullable": True, "is_array": True}, # keywords
|
||||||
|
"important_tks": {"ob_type": "TEXT", "nullable": True}, # keyword tokens
|
||||||
|
"question_kwd": {"ob_type": "ARRAY(String(1024))", "nullable": True, "is_array": True}, # questions
|
||||||
|
"question_tks": {"ob_type": "TEXT", "nullable": True}, # question tokens
|
||||||
|
"tag_kwd": {"ob_type": "ARRAY(String(256))", "nullable": True, "is_array": True}, # tags
|
||||||
|
"tag_feas": {"ob_type": "JSON", "nullable": True, "is_json": True}, # tag features
|
||||||
|
|
||||||
|
# Status fields
|
||||||
|
"available_int": {"ob_type": "Integer", "nullable": False, "default": 1},
|
||||||
|
|
||||||
|
# Time fields
|
||||||
|
"create_time": {"ob_type": "String(19)", "nullable": True},
|
||||||
|
"create_timestamp_flt": {"ob_type": "Double", "nullable": True},
|
||||||
|
|
||||||
|
# Image field
|
||||||
|
"img_id": {"ob_type": "String(128)", "nullable": True},
|
||||||
|
|
||||||
|
# Position fields (arrays)
|
||||||
|
"position_int": {"ob_type": "ARRAY(ARRAY(Integer))", "nullable": True, "is_array": True},
|
||||||
|
"page_num_int": {"ob_type": "ARRAY(Integer)", "nullable": True, "is_array": True},
|
||||||
|
"top_int": {"ob_type": "ARRAY(Integer)", "nullable": True, "is_array": True},
|
||||||
|
|
||||||
|
# Knowledge graph fields
|
||||||
|
"knowledge_graph_kwd": {"ob_type": "String(256)", "nullable": True, "index": True},
|
||||||
|
"source_id": {"ob_type": "ARRAY(String(256))", "nullable": True, "is_array": True},
|
||||||
|
"entity_kwd": {"ob_type": "String(256)", "nullable": True},
|
||||||
|
"entity_type_kwd": {"ob_type": "String(256)", "nullable": True, "index": True},
|
||||||
|
"from_entity_kwd": {"ob_type": "String(256)", "nullable": True},
|
||||||
|
"to_entity_kwd": {"ob_type": "String(256)", "nullable": True},
|
||||||
|
"weight_int": {"ob_type": "Integer", "nullable": True},
|
||||||
|
"weight_flt": {"ob_type": "Double", "nullable": True},
|
||||||
|
"entities_kwd": {"ob_type": "ARRAY(String(256))", "nullable": True, "is_array": True},
|
||||||
|
"rank_flt": {"ob_type": "Double", "nullable": True},
|
||||||
|
|
||||||
|
# Status
|
||||||
|
"removed_kwd": {"ob_type": "String(256)", "nullable": True, "index": True, "default": "N"},
|
||||||
|
|
||||||
|
# JSON fields
|
||||||
|
"metadata": {"ob_type": "JSON", "nullable": True, "is_json": True},
|
||||||
|
"extra": {"ob_type": "JSON", "nullable": True, "is_json": True},
|
||||||
|
|
||||||
|
# New columns
|
||||||
|
"_order_id": {"ob_type": "Integer", "nullable": True},
|
||||||
|
"group_id": {"ob_type": "String(256)", "nullable": True},
|
||||||
|
"mom_id": {"ob_type": "String(256)", "nullable": True},
|
||||||
|
}
|
||||||
|
|
||||||
|
# Array column names for special handling
|
||||||
|
ARRAY_COLUMNS = [
|
||||||
|
"important_kwd", "question_kwd", "tag_kwd", "source_id",
|
||||||
|
"entities_kwd", "position_int", "page_num_int", "top_int"
|
||||||
|
]
|
||||||
|
|
||||||
|
# JSON column names
|
||||||
|
JSON_COLUMNS = ["tag_feas", "metadata", "extra"]
|
||||||
|
|
||||||
|
# Fulltext search columns (for reference)
|
||||||
|
FTS_COLUMNS_ORIGIN = ["docnm_kwd", "content_with_weight", "important_tks", "question_tks"]
|
||||||
|
FTS_COLUMNS_TKS = ["title_tks", "title_sm_tks", "important_tks", "question_tks", "content_ltks", "content_sm_ltks"]
|
||||||
|
|
||||||
|
# Vector field pattern: q_{vector_size}_vec
|
||||||
|
VECTOR_FIELD_PATTERN = re.compile(r"q_(?P<vector_size>\d+)_vec")
|
||||||
|
|
||||||
|
|
||||||
|
class RAGFlowSchemaConverter:
|
||||||
|
"""
|
||||||
|
Convert RAGFlow Elasticsearch documents to OceanBase format.
|
||||||
|
|
||||||
|
RAGFlow uses a fixed schema, so this converter knows exactly
|
||||||
|
what fields to expect and how to map them.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.vector_fields: list[dict[str, Any]] = []
|
||||||
|
self.detected_vector_size: int | None = None
|
||||||
|
|
||||||
|
def analyze_es_mapping(self, es_mapping: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Analyze ES mapping to extract vector field dimensions.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_mapping: Elasticsearch index mapping
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Analysis result with detected fields
|
||||||
|
"""
|
||||||
|
result = {
|
||||||
|
"known_fields": [],
|
||||||
|
"vector_fields": [],
|
||||||
|
"unknown_fields": [],
|
||||||
|
}
|
||||||
|
|
||||||
|
properties = es_mapping.get("properties", {})
|
||||||
|
|
||||||
|
for field_name, field_def in properties.items():
|
||||||
|
# Check if it's a known RAGFlow field
|
||||||
|
if field_name in RAGFLOW_COLUMNS:
|
||||||
|
result["known_fields"].append(field_name)
|
||||||
|
# Check if it's a vector field
|
||||||
|
elif VECTOR_FIELD_PATTERN.match(field_name):
|
||||||
|
match = VECTOR_FIELD_PATTERN.match(field_name)
|
||||||
|
vec_size = int(match.group("vector_size"))
|
||||||
|
result["vector_fields"].append({
|
||||||
|
"name": field_name,
|
||||||
|
"dimension": vec_size,
|
||||||
|
})
|
||||||
|
self.vector_fields.append({
|
||||||
|
"name": field_name,
|
||||||
|
"dimension": vec_size,
|
||||||
|
})
|
||||||
|
if self.detected_vector_size is None:
|
||||||
|
self.detected_vector_size = vec_size
|
||||||
|
else:
|
||||||
|
# Unknown field - might be custom field stored in 'extra'
|
||||||
|
result["unknown_fields"].append(field_name)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Analyzed ES mapping: {len(result['known_fields'])} known fields, "
|
||||||
|
f"{len(result['vector_fields'])} vector fields, "
|
||||||
|
f"{len(result['unknown_fields'])} unknown fields"
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get_column_definitions(self) -> list[dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Get RAGFlow column definitions for OceanBase table creation.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of column definitions
|
||||||
|
"""
|
||||||
|
columns = []
|
||||||
|
|
||||||
|
for col_name, col_def in RAGFLOW_COLUMNS.items():
|
||||||
|
columns.append({
|
||||||
|
"name": col_name,
|
||||||
|
"ob_type": col_def["ob_type"],
|
||||||
|
"nullable": col_def.get("nullable", True),
|
||||||
|
"is_primary": col_def.get("is_primary", False),
|
||||||
|
"index": col_def.get("index", False),
|
||||||
|
"is_array": col_def.get("is_array", False),
|
||||||
|
"is_json": col_def.get("is_json", False),
|
||||||
|
"default": col_def.get("default"),
|
||||||
|
})
|
||||||
|
|
||||||
|
# Add detected vector fields
|
||||||
|
for vec_field in self.vector_fields:
|
||||||
|
columns.append({
|
||||||
|
"name": vec_field["name"],
|
||||||
|
"ob_type": f"VECTOR({vec_field['dimension']})",
|
||||||
|
"nullable": True,
|
||||||
|
"is_vector": True,
|
||||||
|
"dimension": vec_field["dimension"],
|
||||||
|
})
|
||||||
|
|
||||||
|
return columns
|
||||||
|
|
||||||
|
def get_vector_fields(self) -> list[dict[str, Any]]:
|
||||||
|
"""Get list of vector fields for index creation."""
|
||||||
|
return self.vector_fields
|
||||||
|
|
||||||
|
|
||||||
|
class RAGFlowDataConverter:
|
||||||
|
"""
|
||||||
|
Convert RAGFlow ES documents to OceanBase row format.
|
||||||
|
|
||||||
|
This converter handles the specific data transformations needed
|
||||||
|
for RAGFlow's data structure.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initialize data converter."""
|
||||||
|
self.vector_fields: set[str] = set()
|
||||||
|
|
||||||
|
def detect_vector_fields(self, doc: dict[str, Any]) -> None:
|
||||||
|
"""Detect vector fields from a sample document."""
|
||||||
|
for key in doc.keys():
|
||||||
|
if VECTOR_FIELD_PATTERN.match(key):
|
||||||
|
self.vector_fields.add(key)
|
||||||
|
|
||||||
|
def convert_document(self, es_doc: dict[str, Any]) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Convert an ES document to OceanBase row format.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_doc: Elasticsearch document (with _id and _source)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary ready for OceanBase insertion
|
||||||
|
"""
|
||||||
|
# Extract _id and _source
|
||||||
|
doc_id = es_doc.get("_id")
|
||||||
|
source = es_doc.get("_source", es_doc)
|
||||||
|
|
||||||
|
row = {}
|
||||||
|
|
||||||
|
# Set document ID
|
||||||
|
if doc_id:
|
||||||
|
row["id"] = str(doc_id)
|
||||||
|
elif "id" in source:
|
||||||
|
row["id"] = str(source["id"])
|
||||||
|
|
||||||
|
# Process each field
|
||||||
|
for field_name, field_def in RAGFLOW_COLUMNS.items():
|
||||||
|
if field_name == "id":
|
||||||
|
continue # Already handled
|
||||||
|
|
||||||
|
value = source.get(field_name)
|
||||||
|
|
||||||
|
if value is None:
|
||||||
|
# Use default if available
|
||||||
|
default = field_def.get("default")
|
||||||
|
if default is not None:
|
||||||
|
row[field_name] = default
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Convert based on field type
|
||||||
|
row[field_name] = self._convert_field_value(
|
||||||
|
field_name, value, field_def
|
||||||
|
)
|
||||||
|
|
||||||
|
# Handle vector fields
|
||||||
|
for key, value in source.items():
|
||||||
|
if VECTOR_FIELD_PATTERN.match(key):
|
||||||
|
if isinstance(value, list):
|
||||||
|
row[key] = value
|
||||||
|
self.vector_fields.add(key)
|
||||||
|
|
||||||
|
# Handle unknown fields -> store in 'extra'
|
||||||
|
extra_fields = {}
|
||||||
|
for key, value in source.items():
|
||||||
|
if key not in RAGFLOW_COLUMNS and not VECTOR_FIELD_PATTERN.match(key):
|
||||||
|
extra_fields[key] = value
|
||||||
|
|
||||||
|
if extra_fields:
|
||||||
|
existing_extra = row.get("extra")
|
||||||
|
if existing_extra and isinstance(existing_extra, dict):
|
||||||
|
existing_extra.update(extra_fields)
|
||||||
|
else:
|
||||||
|
row["extra"] = json.dumps(extra_fields, ensure_ascii=False)
|
||||||
|
|
||||||
|
return row
|
||||||
|
|
||||||
|
def _convert_field_value(
|
||||||
|
self,
|
||||||
|
field_name: str,
|
||||||
|
value: Any,
|
||||||
|
field_def: dict[str, Any]
|
||||||
|
) -> Any:
|
||||||
|
"""
|
||||||
|
Convert a field value to the appropriate format for OceanBase.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
field_name: Field name
|
||||||
|
value: Original value from ES
|
||||||
|
field_def: Field definition from RAGFLOW_COLUMNS
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Converted value
|
||||||
|
"""
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
ob_type = field_def.get("ob_type", "")
|
||||||
|
is_array = field_def.get("is_array", False)
|
||||||
|
is_json = field_def.get("is_json", False)
|
||||||
|
|
||||||
|
# Handle array fields
|
||||||
|
if is_array:
|
||||||
|
return self._convert_array_value(value)
|
||||||
|
|
||||||
|
# Handle JSON fields
|
||||||
|
if is_json:
|
||||||
|
return self._convert_json_value(value)
|
||||||
|
|
||||||
|
# Handle specific types
|
||||||
|
if "Integer" in ob_type:
|
||||||
|
return self._convert_integer(value)
|
||||||
|
|
||||||
|
if "Double" in ob_type or "Float" in ob_type:
|
||||||
|
return self._convert_float(value)
|
||||||
|
|
||||||
|
if "LONGTEXT" in ob_type or "TEXT" in ob_type:
|
||||||
|
return self._convert_text(value)
|
||||||
|
|
||||||
|
if "String" in ob_type:
|
||||||
|
return self._convert_string(value, field_name)
|
||||||
|
|
||||||
|
# Default: convert to string
|
||||||
|
return str(value) if value is not None else None
|
||||||
|
|
||||||
|
def _convert_array_value(self, value: Any) -> str | None:
|
||||||
|
"""Convert array value to JSON string for OceanBase."""
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(value, str):
|
||||||
|
# Already a JSON string
|
||||||
|
try:
|
||||||
|
# Validate it's valid JSON
|
||||||
|
json.loads(value)
|
||||||
|
return value
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
# Not valid JSON, wrap in array
|
||||||
|
return json.dumps([value], ensure_ascii=False)
|
||||||
|
|
||||||
|
if isinstance(value, list):
|
||||||
|
# Clean array values
|
||||||
|
cleaned = []
|
||||||
|
for item in value:
|
||||||
|
if isinstance(item, str):
|
||||||
|
# Clean special characters
|
||||||
|
cleaned_str = item.strip()
|
||||||
|
cleaned_str = cleaned_str.replace('\\', '\\\\')
|
||||||
|
cleaned_str = cleaned_str.replace('\n', '\\n')
|
||||||
|
cleaned_str = cleaned_str.replace('\r', '\\r')
|
||||||
|
cleaned_str = cleaned_str.replace('\t', '\\t')
|
||||||
|
cleaned.append(cleaned_str)
|
||||||
|
else:
|
||||||
|
cleaned.append(item)
|
||||||
|
return json.dumps(cleaned, ensure_ascii=False)
|
||||||
|
|
||||||
|
# Single value - wrap in array
|
||||||
|
return json.dumps([value], ensure_ascii=False)
|
||||||
|
|
||||||
|
def _convert_json_value(self, value: Any) -> str | None:
|
||||||
|
"""Convert JSON value to string for OceanBase."""
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(value, str):
|
||||||
|
# Already a string, validate JSON
|
||||||
|
try:
|
||||||
|
json.loads(value)
|
||||||
|
return value
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
# Not valid JSON, return as-is
|
||||||
|
return value
|
||||||
|
|
||||||
|
if isinstance(value, (dict, list)):
|
||||||
|
return json.dumps(value, ensure_ascii=False)
|
||||||
|
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
def _convert_integer(self, value: Any) -> int | None:
|
||||||
|
"""Convert to integer."""
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(value, bool):
|
||||||
|
return 1 if value else 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
return int(value)
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _convert_float(self, value: Any) -> float | None:
|
||||||
|
"""Convert to float."""
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
return float(value)
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _convert_text(self, value: Any) -> str | None:
|
||||||
|
"""Convert to text/longtext."""
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(value, dict):
|
||||||
|
# content_with_weight might be stored as dict
|
||||||
|
return json.dumps(value, ensure_ascii=False)
|
||||||
|
|
||||||
|
if isinstance(value, list):
|
||||||
|
return json.dumps(value, ensure_ascii=False)
|
||||||
|
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
def _convert_string(self, value: Any, field_name: str) -> str | None:
|
||||||
|
"""Convert to string with length considerations."""
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Handle kb_id which might be a list in ES
|
||||||
|
if field_name == "kb_id" and isinstance(value, list):
|
||||||
|
return str(value[0]) if value else None
|
||||||
|
|
||||||
|
if isinstance(value, (dict, list)):
|
||||||
|
return json.dumps(value, ensure_ascii=False)
|
||||||
|
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
def convert_batch(self, es_docs: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Convert a batch of ES documents.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
es_docs: List of Elasticsearch documents
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of dictionaries ready for OceanBase insertion
|
||||||
|
"""
|
||||||
|
return [self.convert_document(doc) for doc in es_docs]
|
||||||
|
|
||||||
|
|
||||||
|
# Backwards compatibility aliases
|
||||||
|
SchemaConverter = RAGFlowSchemaConverter
|
||||||
|
DataConverter = RAGFlowDataConverter
|
||||||
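A minimal usage sketch of the two converters above, assuming the `es_ob_migration` package is importable; the mapping and document literals are illustrative only, not taken from a live cluster:

```python
# Sketch: analyze an ES mapping, then convert a document for OceanBase insertion.
from es_ob_migration.schema import RAGFlowSchemaConverter, RAGFlowDataConverter

mapping = {
    "properties": {
        "id": {"type": "keyword"},
        "q_768_vec": {"type": "dense_vector", "dims": 768},
    }
}

schema_conv = RAGFlowSchemaConverter()
schema_conv.analyze_es_mapping(mapping)          # detects q_768_vec, dimension 768
columns = schema_conv.get_column_definitions()   # RAGFLOW_COLUMNS plus a VECTOR(768) column

data_conv = RAGFlowDataConverter()
row = data_conv.convert_document({
    "_id": "doc-1",
    "_source": {"id": "doc-1", "kb_id": "kb-001", "q_768_vec": [0.1] * 768},
})
print(row["id"], len(columns))
```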
349 tools/es-to-oceanbase-migration/src/es_ob_migration/verify.py (Normal file)
@@ -0,0 +1,349 @@
"""
Data verification for RAGFlow migration.
"""

import json
import logging
from dataclasses import dataclass, field
from typing import Any

from .es_client import ESClient
from .ob_client import OBClient
from .schema import RAGFLOW_COLUMNS, ARRAY_COLUMNS, JSON_COLUMNS

logger = logging.getLogger(__name__)


@dataclass
class VerificationResult:
    """Migration verification result."""

    es_index: str
    ob_table: str

    # Counts
    es_count: int = 0
    ob_count: int = 0
    count_match: bool = False
    count_diff: int = 0

    # Sample verification
    sample_size: int = 0
    samples_verified: int = 0
    samples_matched: int = 0
    sample_match_rate: float = 0.0

    # Mismatches
    missing_in_ob: list[str] = field(default_factory=list)
    data_mismatches: list[dict[str, Any]] = field(default_factory=list)

    # Overall
    passed: bool = False
    message: str = ""


class MigrationVerifier:
    """Verify RAGFlow migration data consistency."""

    # Fields to compare for verification
    VERIFY_FIELDS = [
        "id", "kb_id", "doc_id", "docnm_kwd", "content_with_weight",
        "available_int", "create_time",
    ]

    def __init__(
        self,
        es_client: ESClient,
        ob_client: OBClient,
    ):
        """
        Initialize verifier.

        Args:
            es_client: Elasticsearch client
            ob_client: OceanBase client
        """
        self.es_client = es_client
        self.ob_client = ob_client

    def verify(
        self,
        es_index: str,
        ob_table: str,
        sample_size: int = 100,
        primary_key: str = "id",
        verify_fields: list[str] | None = None,
    ) -> VerificationResult:
        """
        Verify migration by comparing ES and OceanBase data.

        Args:
            es_index: Elasticsearch index name
            ob_table: OceanBase table name
            sample_size: Number of documents to sample for verification
            primary_key: Primary key column name
            verify_fields: Fields to verify (None = use defaults)

        Returns:
            VerificationResult with details
        """
        result = VerificationResult(
            es_index=es_index,
            ob_table=ob_table,
        )

        if verify_fields is None:
            verify_fields = self.VERIFY_FIELDS

        # Step 1: Verify document counts
        logger.info("Verifying document counts...")

        result.es_count = self.es_client.count_documents(es_index)
        result.ob_count = self.ob_client.count_rows(ob_table)

        result.count_diff = abs(result.es_count - result.ob_count)
        result.count_match = result.count_diff == 0

        logger.info(
            f"Document counts - ES: {result.es_count}, OB: {result.ob_count}, "
            f"Diff: {result.count_diff}"
        )

        # Step 2: Sample verification
        result.sample_size = min(sample_size, result.es_count)

        if result.sample_size > 0:
            logger.info(f"Verifying {result.sample_size} sample documents...")
            self._verify_samples(
                es_index, ob_table, result, primary_key, verify_fields
            )

        # Step 3: Determine overall result
        self._determine_result(result)

        logger.info(result.message)
        return result

    def _verify_samples(
        self,
        es_index: str,
        ob_table: str,
        result: VerificationResult,
        primary_key: str,
        verify_fields: list[str],
    ):
        """Verify sample documents."""
        # Get sample documents from ES
        es_samples = self.es_client.get_sample_documents(
            es_index, result.sample_size
        )

        for es_doc in es_samples:
            result.samples_verified += 1
            doc_id = es_doc.get("_id") or es_doc.get("id")

            if not doc_id:
                logger.warning("Document without ID found")
                continue

            # Get corresponding document from OceanBase
            ob_doc = self.ob_client.get_row_by_id(ob_table, doc_id)

            if ob_doc is None:
                result.missing_in_ob.append(doc_id)
                continue

            # Compare documents
            match, differences = self._compare_documents(
                es_doc, ob_doc, verify_fields
            )

            if match:
                result.samples_matched += 1
            else:
                result.data_mismatches.append({
                    "id": doc_id,
                    "differences": differences,
                })

        # Calculate match rate
        if result.samples_verified > 0:
            result.sample_match_rate = result.samples_matched / result.samples_verified

    def _compare_documents(
        self,
        es_doc: dict[str, Any],
        ob_doc: dict[str, Any],
        verify_fields: list[str],
    ) -> tuple[bool, list[dict[str, Any]]]:
        """
        Compare ES document with OceanBase row.

        Returns:
            Tuple of (match: bool, differences: list)
        """
        differences = []

        for field_name in verify_fields:
            es_value = es_doc.get(field_name)
            ob_value = ob_doc.get(field_name)

            # Skip if both are None/null
            if es_value is None and ob_value is None:
                continue

            # Handle special comparisons
            if not self._values_equal(field_name, es_value, ob_value):
                differences.append({
                    "field": field_name,
                    "es_value": es_value,
                    "ob_value": ob_value,
                })

        return len(differences) == 0, differences

    def _values_equal(
        self,
        field_name: str,
        es_value: Any,
        ob_value: Any
    ) -> bool:
        """Compare two values with type-aware logic."""
        if es_value is None and ob_value is None:
            return True

        if es_value is None or ob_value is None:
            # One is None, the other isn't
            # For optional fields, this might be acceptable
            return False

        # Handle array fields (stored as JSON strings in OB)
        if field_name in ARRAY_COLUMNS:
            if isinstance(ob_value, str):
                try:
                    ob_value = json.loads(ob_value)
                except json.JSONDecodeError:
                    pass
            if isinstance(es_value, list) and isinstance(ob_value, list):
                return set(str(x) for x in es_value) == set(str(x) for x in ob_value)

        # Handle JSON fields
        if field_name in JSON_COLUMNS:
            if isinstance(ob_value, str):
                try:
                    ob_value = json.loads(ob_value)
                except json.JSONDecodeError:
                    pass
            if isinstance(es_value, str):
                try:
                    es_value = json.loads(es_value)
                except json.JSONDecodeError:
                    pass
            return es_value == ob_value

        # Handle content_with_weight which might be dict or string
        if field_name == "content_with_weight":
            if isinstance(ob_value, str) and isinstance(es_value, dict):
                try:
                    ob_value = json.loads(ob_value)
                except json.JSONDecodeError:
                    pass

        # Handle kb_id which might be list in ES
        if field_name == "kb_id":
            if isinstance(es_value, list) and len(es_value) > 0:
                es_value = es_value[0]

        # Standard comparison
        return str(es_value) == str(ob_value)

    def _determine_result(self, result: VerificationResult):
        """Determine overall verification result."""
        # Allow small count differences (e.g., documents added during migration)
        count_tolerance = 0.01  # 1% tolerance
        count_ok = (
            result.count_match or
            (result.es_count > 0 and result.count_diff / result.es_count <= count_tolerance)
        )

        if count_ok and result.sample_match_rate >= 0.99:
            result.passed = True
            result.message = (
                f"Verification PASSED. "
                f"ES: {result.es_count:,}, OB: {result.ob_count:,}. "
                f"Sample match rate: {result.sample_match_rate:.2%}"
            )
        elif count_ok and result.sample_match_rate >= 0.95:
            result.passed = True
            result.message = (
                f"Verification PASSED with warnings. "
                f"ES: {result.es_count:,}, OB: {result.ob_count:,}. "
                f"Sample match rate: {result.sample_match_rate:.2%}"
            )
        else:
            result.passed = False
            issues = []
            if not count_ok:
                issues.append(
                    f"Count mismatch (ES: {result.es_count}, OB: {result.ob_count}, diff: {result.count_diff})"
                )
            if result.sample_match_rate < 0.95:
                issues.append(f"Low sample match rate: {result.sample_match_rate:.2%}")
            if result.missing_in_ob:
                issues.append(f"{len(result.missing_in_ob)} documents missing in OB")
            result.message = f"Verification FAILED: {'; '.join(issues)}"

    def generate_report(self, result: VerificationResult) -> str:
        """Generate a verification report."""
        lines = [
            "",
            "=" * 60,
            "Migration Verification Report",
            "=" * 60,
            f"ES Index: {result.es_index}",
            f"OB Table: {result.ob_table}",
        ]

        lines.extend([
            "",
            "Document Counts:",
            f"  Elasticsearch: {result.es_count:,}",
            f"  OceanBase: {result.ob_count:,}",
            f"  Difference: {result.count_diff:,}",
            f"  Match: {'Yes' if result.count_match else 'No'}",
            "",
            "Sample Verification:",
            f"  Sample Size: {result.sample_size}",
            f"  Verified: {result.samples_verified}",
            f"  Matched: {result.samples_matched}",
            f"  Match Rate: {result.sample_match_rate:.2%}",
            "",
        ])

        if result.missing_in_ob:
            lines.append(f"Missing in OceanBase ({len(result.missing_in_ob)}):")
            for doc_id in result.missing_in_ob[:5]:
                lines.append(f"  - {doc_id}")
            if len(result.missing_in_ob) > 5:
                lines.append(f"  ... and {len(result.missing_in_ob) - 5} more")
            lines.append("")

        if result.data_mismatches:
            lines.append(f"Data Mismatches ({len(result.data_mismatches)}):")
            for mismatch in result.data_mismatches[:3]:
                lines.append(f"  - ID: {mismatch['id']}")
                for diff in mismatch.get("differences", [])[:2]:
                    lines.append(f"    {diff['field']}: ES={diff['es_value']}, OB={diff['ob_value']}")
            if len(result.data_mismatches) > 3:
                lines.append(f"  ... and {len(result.data_mismatches) - 3} more")
            lines.append("")

        lines.extend([
            "=" * 60,
            f"Result: {'PASSED' if result.passed else 'FAILED'}",
            result.message,
            "=" * 60,
            "",
        ])

        return "\n".join(lines)
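A minimal sketch of how `verify()` and `generate_report()` fit together. Real `ESClient`/`OBClient` construction is defined elsewhere in this PR, so mocks are used here (as in `tests/test_verify.py`) to keep the snippet self-contained:

```python
from unittest.mock import Mock

from es_ob_migration.verify import MigrationVerifier

# Stand-in clients: one matching document on both sides.
es_client = Mock()
es_client.count_documents.return_value = 1
es_client.get_sample_documents.return_value = [{"_id": "doc_1", "id": "doc_1", "kb_id": "kb_001"}]

ob_client = Mock()
ob_client.count_rows.return_value = 1
ob_client.get_row_by_id.return_value = {"id": "doc_1", "kb_id": "kb_001"}

verifier = MigrationVerifier(es_client, ob_client)
result = verifier.verify("ragflow_example", "ragflow_example", sample_size=1)

print(result.passed)                   # True: counts match and the single sample matches
print(verifier.generate_report(result))
```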
1 tools/es-to-oceanbase-migration/tests/__init__.py (Normal file)
@@ -0,0 +1 @@
# Tests for ES to OceanBase migration tool
321 tools/es-to-oceanbase-migration/tests/test_progress.py (Normal file)
@@ -0,0 +1,321 @@
"""
Tests for progress tracking and resume capability.
"""

import json
import os
import tempfile
import pytest
from pathlib import Path
from datetime import datetime

from es_ob_migration.progress import MigrationProgress, ProgressManager


class TestMigrationProgress:
    """Test MigrationProgress dataclass."""

    def test_create_basic_progress(self):
        """Test creating a basic progress object."""
        progress = MigrationProgress(
            es_index="ragflow_test",
            ob_table="ragflow_test",
        )

        assert progress.es_index == "ragflow_test"
        assert progress.ob_table == "ragflow_test"
        assert progress.total_documents == 0
        assert progress.migrated_documents == 0
        assert progress.status == "pending"
        assert progress.started_at != ""
        assert progress.updated_at != ""

    def test_create_progress_with_counts(self):
        """Test creating progress with document counts."""
        progress = MigrationProgress(
            es_index="ragflow_test",
            ob_table="ragflow_test",
            total_documents=1000,
            migrated_documents=500,
        )

        assert progress.total_documents == 1000
        assert progress.migrated_documents == 500

    def test_progress_default_values(self):
        """Test default values."""
        progress = MigrationProgress(
            es_index="test_index",
            ob_table="test_table",
        )

        assert progress.failed_documents == 0
        assert progress.last_sort_values == []
        assert progress.last_batch_ids == []
        assert progress.error_message == ""
        assert progress.schema_converted is False
        assert progress.table_created is False
        assert progress.indexes_created is False

    def test_progress_status_values(self):
        """Test various status values."""
        for status in ["pending", "running", "completed", "failed", "paused"]:
            progress = MigrationProgress(
                es_index="test",
                ob_table="test",
                status=status,
            )
            assert progress.status == status


class TestProgressManager:
    """Test ProgressManager class."""

    @pytest.fixture
    def temp_dir(self):
        """Create a temporary directory for tests."""
        with tempfile.TemporaryDirectory() as tmpdir:
            yield tmpdir

    @pytest.fixture
    def manager(self, temp_dir):
        """Create a ProgressManager with temp directory."""
        return ProgressManager(progress_dir=temp_dir)

    def test_create_progress_manager(self, temp_dir):
        """Test creating a progress manager."""
        manager = ProgressManager(progress_dir=temp_dir)
        assert manager.progress_dir.exists()

    def test_create_progress_manager_creates_dir(self, temp_dir):
        """Test that progress manager creates directory."""
        new_dir = os.path.join(temp_dir, "new_progress")
        manager = ProgressManager(progress_dir=new_dir)
        assert Path(new_dir).exists()

    def test_create_progress(self, manager):
        """Test creating new progress."""
        progress = manager.create_progress(
            es_index="ragflow_abc123",
            ob_table="ragflow_abc123",
            total_documents=1000,
        )

        assert progress.es_index == "ragflow_abc123"
        assert progress.ob_table == "ragflow_abc123"
        assert progress.total_documents == 1000
        assert progress.status == "running"

    def test_save_and_load_progress(self, manager):
        """Test saving and loading progress."""
        # Create and save
        progress = manager.create_progress(
            es_index="ragflow_test",
            ob_table="ragflow_test",
            total_documents=500,
        )
        progress.migrated_documents = 250
        progress.last_sort_values = ["doc_250", 1234567890]
        manager.save_progress(progress)

        # Load
        loaded = manager.load_progress("ragflow_test", "ragflow_test")

        assert loaded is not None
        assert loaded.es_index == "ragflow_test"
        assert loaded.total_documents == 500
        assert loaded.migrated_documents == 250
        assert loaded.last_sort_values == ["doc_250", 1234567890]

    def test_load_nonexistent_progress(self, manager):
        """Test loading progress that doesn't exist."""
        loaded = manager.load_progress("nonexistent", "nonexistent")
        assert loaded is None

    def test_delete_progress(self, manager):
        """Test deleting progress."""
        # Create progress
        manager.create_progress(
            es_index="ragflow_delete_test",
            ob_table="ragflow_delete_test",
            total_documents=100,
        )

        # Verify it exists
        assert manager.load_progress("ragflow_delete_test", "ragflow_delete_test") is not None

        # Delete
        manager.delete_progress("ragflow_delete_test", "ragflow_delete_test")

        # Verify it's gone
        assert manager.load_progress("ragflow_delete_test", "ragflow_delete_test") is None

    def test_update_progress(self, manager):
        """Test updating progress."""
        progress = manager.create_progress(
            es_index="ragflow_update",
            ob_table="ragflow_update",
            total_documents=1000,
        )

        # Update
        manager.update_progress(
            progress,
            migrated_count=100,
            last_sort_values=["doc_100", 9999],
            last_batch_ids=["id1", "id2", "id3"],
        )

        assert progress.migrated_documents == 100
        assert progress.last_sort_values == ["doc_100", 9999]
        assert progress.last_batch_ids == ["id1", "id2", "id3"]

    def test_update_progress_multiple_batches(self, manager):
        """Test updating progress multiple times."""
        progress = manager.create_progress(
            es_index="ragflow_multi",
            ob_table="ragflow_multi",
            total_documents=1000,
        )

        # Update multiple times
        for i in range(1, 11):
            manager.update_progress(progress, migrated_count=100)

        assert progress.migrated_documents == 1000

    def test_mark_completed(self, manager):
        """Test marking migration as completed."""
        progress = manager.create_progress(
            es_index="ragflow_complete",
            ob_table="ragflow_complete",
            total_documents=100,
        )
        progress.migrated_documents = 100

        manager.mark_completed(progress)

        assert progress.status == "completed"

    def test_mark_failed(self, manager):
        """Test marking migration as failed."""
        progress = manager.create_progress(
            es_index="ragflow_fail",
            ob_table="ragflow_fail",
            total_documents=100,
        )

        manager.mark_failed(progress, "Connection timeout")

        assert progress.status == "failed"
        assert progress.error_message == "Connection timeout"

    def test_mark_paused(self, manager):
        """Test marking migration as paused."""
        progress = manager.create_progress(
            es_index="ragflow_pause",
            ob_table="ragflow_pause",
            total_documents=1000,
        )
        progress.migrated_documents = 500

        manager.mark_paused(progress)

        assert progress.status == "paused"

    def test_can_resume_running(self, manager):
        """Test can_resume for running migration."""
        progress = manager.create_progress(
            es_index="ragflow_resume_running",
            ob_table="ragflow_resume_running",
            total_documents=1000,
        )

        assert manager.can_resume("ragflow_resume_running", "ragflow_resume_running") is True

    def test_can_resume_paused(self, manager):
        """Test can_resume for paused migration."""
        progress = manager.create_progress(
            es_index="ragflow_resume_paused",
            ob_table="ragflow_resume_paused",
            total_documents=1000,
        )
        manager.mark_paused(progress)

        assert manager.can_resume("ragflow_resume_paused", "ragflow_resume_paused") is True

    def test_can_resume_completed(self, manager):
        """Test can_resume for completed migration."""
        progress = manager.create_progress(
            es_index="ragflow_resume_complete",
            ob_table="ragflow_resume_complete",
            total_documents=100,
        )
        progress.migrated_documents = 100
        manager.mark_completed(progress)

        # Completed migrations should not be resumed
        assert manager.can_resume("ragflow_resume_complete", "ragflow_resume_complete") is False

    def test_can_resume_nonexistent(self, manager):
        """Test can_resume for nonexistent migration."""
        assert manager.can_resume("nonexistent", "nonexistent") is False

    def test_get_resume_info(self, manager):
        """Test getting resume information."""
        progress = manager.create_progress(
            es_index="ragflow_info",
            ob_table="ragflow_info",
            total_documents=1000,
        )
        progress.migrated_documents = 500
        progress.last_sort_values = ["doc_500", 12345]
        progress.schema_converted = True
        progress.table_created = True
        manager.save_progress(progress)

        info = manager.get_resume_info("ragflow_info", "ragflow_info")

        assert info is not None
        assert info["migrated_documents"] == 500
        assert info["total_documents"] == 1000
        assert info["last_sort_values"] == ["doc_500", 12345]
        assert info["schema_converted"] is True
        assert info["table_created"] is True
        assert info["status"] == "running"

    def test_get_resume_info_nonexistent(self, manager):
        """Test getting resume info for nonexistent migration."""
        info = manager.get_resume_info("nonexistent", "nonexistent")
        assert info is None

    def test_progress_file_path(self, manager):
        """Test progress file naming."""
        progress = manager.create_progress(
            es_index="ragflow_abc123",
            ob_table="ragflow_abc123",
            total_documents=100,
        )

        expected_file = manager.progress_dir / "ragflow_abc123_to_ragflow_abc123.json"
        assert expected_file.exists()

    def test_progress_file_content(self, manager):
        """Test progress file JSON content."""
        progress = manager.create_progress(
            es_index="ragflow_json",
            ob_table="ragflow_json",
            total_documents=100,
        )
        progress.migrated_documents = 50
        manager.save_progress(progress)

        # Read file directly
        progress_file = manager.progress_dir / "ragflow_json_to_ragflow_json.json"
        with open(progress_file) as f:
            data = json.load(f)

        assert data["es_index"] == "ragflow_json"
        assert data["ob_table"] == "ragflow_json"
        assert data["total_documents"] == 100
        assert data["migrated_documents"] == 50
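For reference, the shape of a saved progress file as implied by the assertions above; the real `ProgressManager` may persist additional fields (timestamps, error message, etc.), so treat this as an inferred illustration rather than the canonical format:

```python
# Illustrative progress record, inferred from tests/test_progress.py.
example_progress = {
    "es_index": "ragflow_abc123",
    "ob_table": "ragflow_abc123",
    "status": "running",                      # pending | running | completed | failed | paused
    "total_documents": 1000,
    "migrated_documents": 500,
    "failed_documents": 0,
    "last_sort_values": ["doc_500", 12345],   # likely a search_after-style cursor used for resume
    "last_batch_ids": [],
    "schema_converted": True,
    "table_created": True,
    "indexes_created": False,
}
# Stored as <progress_dir>/ragflow_abc123_to_ragflow_abc123.json
```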
649 tools/es-to-oceanbase-migration/tests/test_schema.py (Normal file)
@@ -0,0 +1,649 @@
"""
Tests for RAGFlow schema conversion.

This module tests:
- RAGFlowSchemaConverter: Analyzes ES mappings and generates OB column definitions
- RAGFlowDataConverter: Converts ES documents to OceanBase row format
- Vector field pattern matching
- Schema constants
"""

import json
import pytest
from es_ob_migration.schema import (
    RAGFlowSchemaConverter,
    RAGFlowDataConverter,
    RAGFLOW_COLUMNS,
    ARRAY_COLUMNS,
    JSON_COLUMNS,
    VECTOR_FIELD_PATTERN,
    FTS_COLUMNS_ORIGIN,
    FTS_COLUMNS_TKS,
)


class TestRAGFlowSchemaConverter:
    """Test RAGFlowSchemaConverter class."""

    def test_analyze_ragflow_mapping(self):
        """Test analyzing a RAGFlow ES mapping."""
        converter = RAGFlowSchemaConverter()

        # Simulate a RAGFlow ES mapping
        es_mapping = {
            "properties": {
                "id": {"type": "keyword"},
                "kb_id": {"type": "keyword"},
                "doc_id": {"type": "keyword"},
                "docnm_kwd": {"type": "keyword"},
                "content_with_weight": {"type": "text"},
                "content_ltks": {"type": "text"},
                "available_int": {"type": "integer"},
                "important_kwd": {"type": "keyword"},
                "q_768_vec": {"type": "dense_vector", "dims": 768},
            }
        }

        analysis = converter.analyze_es_mapping(es_mapping)

        # Check known fields
        assert "id" in analysis["known_fields"]
        assert "kb_id" in analysis["known_fields"]
        assert "content_with_weight" in analysis["known_fields"]

        # Check vector fields
        assert len(analysis["vector_fields"]) == 1
        assert analysis["vector_fields"][0]["name"] == "q_768_vec"
        assert analysis["vector_fields"][0]["dimension"] == 768

    def test_detect_vector_size(self):
        """Test automatic vector size detection."""
        converter = RAGFlowSchemaConverter()

        es_mapping = {
            "properties": {
                "q_1536_vec": {"type": "dense_vector", "dims": 1536},
            }
        }

        converter.analyze_es_mapping(es_mapping)

        assert converter.detected_vector_size == 1536

    def test_unknown_fields(self):
        """Test that unknown fields are properly identified."""
        converter = RAGFlowSchemaConverter()

        es_mapping = {
            "properties": {
                "id": {"type": "keyword"},
                "custom_field": {"type": "text"},
                "another_field": {"type": "integer"},
            }
        }

        analysis = converter.analyze_es_mapping(es_mapping)

        assert "custom_field" in analysis["unknown_fields"]
        assert "another_field" in analysis["unknown_fields"]

    def test_get_column_definitions(self):
        """Test getting RAGFlow column definitions."""
        converter = RAGFlowSchemaConverter()

        # First analyze to detect vector fields
        es_mapping = {
            "properties": {
                "q_768_vec": {"type": "dense_vector", "dims": 768},
            }
        }
        converter.analyze_es_mapping(es_mapping)

        columns = converter.get_column_definitions()

        # Check that all RAGFlow columns are present
        column_names = [c["name"] for c in columns]

        for col_name in RAGFLOW_COLUMNS:
            assert col_name in column_names, f"Missing column: {col_name}"

        # Check vector column is added
        assert "q_768_vec" in column_names


class TestRAGFlowDataConverter:
    """Test RAGFlowDataConverter class."""

    def test_convert_basic_document(self):
        """Test converting a basic RAGFlow document."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "test-id-123",
            "_source": {
                "id": "test-id-123",
                "kb_id": "kb-001",
                "doc_id": "doc-001",
                "docnm_kwd": "test_document.pdf",
                "content_with_weight": "This is test content",
                "available_int": 1,
            }
        }

        row = converter.convert_document(es_doc)

        assert row["id"] == "test-id-123"
        assert row["kb_id"] == "kb-001"
        assert row["doc_id"] == "doc-001"
        assert row["docnm_kwd"] == "test_document.pdf"
        assert row["content_with_weight"] == "This is test content"
        assert row["available_int"] == 1

    def test_convert_with_vector(self):
        """Test converting document with vector embedding."""
        converter = RAGFlowDataConverter()

        embedding = [0.1] * 768
        es_doc = {
            "_id": "vec-doc-001",
            "_source": {
                "id": "vec-doc-001",
                "kb_id": "kb-001",
                "q_768_vec": embedding,
            }
        }

        row = converter.convert_document(es_doc)

        assert row["id"] == "vec-doc-001"
        assert row["q_768_vec"] == embedding
        assert "q_768_vec" in converter.vector_fields

    def test_convert_array_fields(self):
        """Test converting array fields."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "array-doc",
            "_source": {
                "id": "array-doc",
                "kb_id": "kb-001",
                "important_kwd": ["keyword1", "keyword2", "keyword3"],
                "question_kwd": ["What is this?", "How does it work?"],
                "tag_kwd": ["tag1", "tag2"],
            }
        }

        row = converter.convert_document(es_doc)

        # Array fields should be JSON strings
        assert isinstance(row["important_kwd"], str)
        parsed = json.loads(row["important_kwd"])
        assert parsed == ["keyword1", "keyword2", "keyword3"]

    def test_convert_json_fields(self):
        """Test converting JSON fields."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "json-doc",
            "_source": {
                "id": "json-doc",
                "kb_id": "kb-001",
                "tag_feas": {"tag1": 0.8, "tag2": 0.5},
                "metadata": {"author": "John", "date": "2024-01-01"},
            }
        }

        row = converter.convert_document(es_doc)

        # JSON fields should be JSON strings
        assert isinstance(row["tag_feas"], str)
        assert isinstance(row["metadata"], str)

        tag_feas = json.loads(row["tag_feas"])
        assert tag_feas == {"tag1": 0.8, "tag2": 0.5}

    def test_convert_unknown_fields_to_extra(self):
        """Test that unknown fields are stored in 'extra'."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "extra-doc",
            "_source": {
                "id": "extra-doc",
                "kb_id": "kb-001",
                "custom_field": "custom_value",
                "another_custom": 123,
            }
        }

        row = converter.convert_document(es_doc)

        assert "extra" in row
        extra = json.loads(row["extra"])
        assert extra["custom_field"] == "custom_value"
        assert extra["another_custom"] == 123

    def test_convert_kb_id_list(self):
        """Test converting kb_id when it's a list (ES format)."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "kb-list-doc",
            "_source": {
                "id": "kb-list-doc",
                "kb_id": ["kb-001", "kb-002"],  # Some ES docs have list
            }
        }

        row = converter.convert_document(es_doc)

        # Should take first element
        assert row["kb_id"] == "kb-001"

    def test_convert_content_with_weight_dict(self):
        """Test converting content_with_weight when it's a dict."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "content-dict-doc",
            "_source": {
                "id": "content-dict-doc",
                "kb_id": "kb-001",
                "content_with_weight": {
                    "text": "Some content",
                    "weight": 1.0,
                },
            }
        }

        row = converter.convert_document(es_doc)

        # Dict should be JSON serialized
        assert isinstance(row["content_with_weight"], str)
        parsed = json.loads(row["content_with_weight"])
        assert parsed["text"] == "Some content"

    def test_convert_batch(self):
        """Test batch conversion."""
        converter = RAGFlowDataConverter()

        es_docs = [
            {"_id": f"doc-{i}", "_source": {"id": f"doc-{i}", "kb_id": "kb-001"}}
            for i in range(5)
        ]

        rows = converter.convert_batch(es_docs)

        assert len(rows) == 5
        for i, row in enumerate(rows):
            assert row["id"] == f"doc-{i}"


class TestVectorFieldPattern:
    """Test vector field pattern matching."""

    def test_valid_patterns(self):
        """Test valid vector field patterns."""
        valid_names = [
            "q_768_vec",
            "q_1024_vec",
            "q_1536_vec",
            "q_3072_vec",
        ]

        for name in valid_names:
            match = VECTOR_FIELD_PATTERN.match(name)
            assert match is not None, f"Should match: {name}"

    def test_invalid_patterns(self):
        """Test invalid vector field patterns."""
        invalid_names = [
            "q_vec",
            "768_vec",
            "q_768",
            "vector_768",
            "content_with_weight",
        ]

        for name in invalid_names:
            match = VECTOR_FIELD_PATTERN.match(name)
            assert match is None, f"Should not match: {name}"

    def test_extract_dimension(self):
        """Test extracting dimension from pattern."""
        match = VECTOR_FIELD_PATTERN.match("q_1536_vec")
        assert match is not None
        assert int(match.group("vector_size")) == 1536


class TestConstants:
    """Test schema constants."""

    def test_array_columns(self):
        """Test ARRAY_COLUMNS list."""
        expected = [
            "important_kwd", "question_kwd", "tag_kwd", "source_id",
            "entities_kwd", "position_int", "page_num_int", "top_int"
        ]

        for col in expected:
            assert col in ARRAY_COLUMNS, f"Missing array column: {col}"

    def test_json_columns(self):
        """Test JSON_COLUMNS list."""
        expected = ["tag_feas", "metadata", "extra"]

        for col in expected:
            assert col in JSON_COLUMNS, f"Missing JSON column: {col}"

    def test_ragflow_columns_completeness(self):
        """Test that RAGFLOW_COLUMNS has all required fields."""
        required_fields = [
            "id", "kb_id", "doc_id", "content_with_weight",
            "available_int", "metadata", "extra",
        ]

        for field in required_fields:
            assert field in RAGFLOW_COLUMNS, f"Missing required field: {field}"

    def test_fts_columns(self):
        """Test fulltext search column lists."""
        assert "content_with_weight" in FTS_COLUMNS_ORIGIN
        assert "content_ltks" in FTS_COLUMNS_TKS

    def test_ragflow_columns_types(self):
        """Test column type definitions."""
        # Primary key
        assert RAGFLOW_COLUMNS["id"]["is_primary"] is True
        assert RAGFLOW_COLUMNS["id"]["nullable"] is False

        # Indexed columns
        assert RAGFLOW_COLUMNS["kb_id"]["index"] is True
        assert RAGFLOW_COLUMNS["doc_id"]["index"] is True

        # Array columns
        assert RAGFLOW_COLUMNS["important_kwd"]["is_array"] is True
        assert RAGFLOW_COLUMNS["question_kwd"]["is_array"] is True

        # JSON columns
        assert RAGFLOW_COLUMNS["metadata"]["is_json"] is True
        assert RAGFLOW_COLUMNS["extra"]["is_json"] is True


class TestRAGFlowSchemaConverterEdgeCases:
    """Test edge cases for RAGFlowSchemaConverter."""

    def test_empty_mapping(self):
        """Test analyzing empty mapping."""
        converter = RAGFlowSchemaConverter()

        analysis = converter.analyze_es_mapping({})

        assert analysis["known_fields"] == []
        assert analysis["vector_fields"] == []
        assert analysis["unknown_fields"] == []

    def test_mapping_without_properties(self):
        """Test mapping without properties key."""
        converter = RAGFlowSchemaConverter()

        analysis = converter.analyze_es_mapping({"some_other_key": {}})

        assert analysis["known_fields"] == []

    def test_multiple_vector_fields(self):
        """Test detecting multiple vector fields."""
        converter = RAGFlowSchemaConverter()

        es_mapping = {
            "properties": {
                "q_768_vec": {"type": "dense_vector", "dims": 768},
                "q_1024_vec": {"type": "dense_vector", "dims": 1024},
            }
        }

        analysis = converter.analyze_es_mapping(es_mapping)

        assert len(analysis["vector_fields"]) == 2
        # First detected should be set
        assert converter.detected_vector_size in [768, 1024]

    def test_get_column_definitions_without_analysis(self):
        """Test getting columns without prior analysis."""
        converter = RAGFlowSchemaConverter()

        columns = converter.get_column_definitions()

        # Should have all RAGFlow columns but no vector columns
        column_names = [c["name"] for c in columns]
        assert "id" in column_names
        assert "kb_id" in column_names

    def test_get_vector_fields(self):
        """Test getting vector fields."""
        converter = RAGFlowSchemaConverter()

        es_mapping = {
            "properties": {
                "q_1536_vec": {"type": "dense_vector", "dims": 1536},
            }
        }
        converter.analyze_es_mapping(es_mapping)

        vec_fields = converter.get_vector_fields()

        assert len(vec_fields) == 1
        assert vec_fields[0]["name"] == "q_1536_vec"
        assert vec_fields[0]["dimension"] == 1536


class TestRAGFlowDataConverterEdgeCases:
    """Test edge cases for RAGFlowDataConverter."""

    def test_convert_empty_document(self):
        """Test converting empty document."""
        converter = RAGFlowDataConverter()

        es_doc = {"_id": "empty_doc", "_source": {}}
        row = converter.convert_document(es_doc)

        assert row["id"] == "empty_doc"

    def test_convert_document_without_source(self):
        """Test converting document without _source."""
        converter = RAGFlowDataConverter()

        es_doc = {"_id": "no_source", "id": "no_source", "kb_id": "kb_001"}
        row = converter.convert_document(es_doc)

        assert row["id"] == "no_source"
        assert row["kb_id"] == "kb_001"

    def test_convert_boolean_to_integer(self):
        """Test converting boolean to integer."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "bool_doc",
            "_source": {
                "id": "bool_doc",
                "kb_id": "kb_001",
                "available_int": True,
            }
        }

        row = converter.convert_document(es_doc)

        assert row["available_int"] == 1

    def test_convert_invalid_integer(self):
        """Test converting invalid integer value."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "invalid_int",
            "_source": {
                "id": "invalid_int",
                "kb_id": "kb_001",
                "available_int": "not_a_number",
            }
        }

        row = converter.convert_document(es_doc)

        assert row["available_int"] is None

    def test_convert_float_field(self):
        """Test converting float fields."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "float_doc",
            "_source": {
                "id": "float_doc",
                "kb_id": "kb_001",
                "weight_flt": 0.85,
                "rank_flt": "0.95",  # String that should become float
            }
        }

        row = converter.convert_document(es_doc)

        assert row["weight_flt"] == 0.85
        assert row["rank_flt"] == 0.95

    def test_convert_array_with_special_characters(self):
        """Test converting array with special characters."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "special_array",
            "_source": {
                "id": "special_array",
                "kb_id": "kb_001",
                "important_kwd": ["key\nwith\nnewlines", "key\twith\ttabs"],
            }
        }

        row = converter.convert_document(es_doc)

        # Should be JSON string with escaped characters
        assert isinstance(row["important_kwd"], str)
        parsed = json.loads(row["important_kwd"])
        assert len(parsed) == 2

    def test_convert_already_json_array(self):
        """Test converting already JSON-encoded array."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "json_array",
            "_source": {
                "id": "json_array",
                "kb_id": "kb_001",
                "important_kwd": '["already", "json"]',
            }
        }

        row = converter.convert_document(es_doc)

        assert row["important_kwd"] == '["already", "json"]'

    def test_convert_single_value_to_array(self):
        """Test converting single value to array."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "single_to_array",
            "_source": {
                "id": "single_to_array",
                "kb_id": "kb_001",
                "important_kwd": "single_keyword",
            }
        }

        row = converter.convert_document(es_doc)

        parsed = json.loads(row["important_kwd"])
        assert parsed == ["single_keyword"]

    def test_detect_vector_fields_from_document(self):
        """Test detecting vector fields from document."""
        converter = RAGFlowDataConverter()

        doc = {
            "q_768_vec": [0.1] * 768,
            "q_1024_vec": [0.2] * 1024,
        }

        converter.detect_vector_fields(doc)

        assert "q_768_vec" in converter.vector_fields
        assert "q_1024_vec" in converter.vector_fields

    def test_convert_with_default_values(self):
        """Test conversion uses default values."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "default_test",
            "_source": {
                "id": "default_test",
                "kb_id": "kb_001",
                # available_int not provided, should get default
            }
        }

        row = converter.convert_document(es_doc)

        # available_int has default of 1
        assert row.get("available_int") == 1

    def test_convert_list_content(self):
        """Test converting list content to JSON."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "list_content",
            "_source": {
                "id": "list_content",
                "kb_id": "kb_001",
                "content_with_weight": ["part1", "part2", "part3"],
            }
        }

        row = converter.convert_document(es_doc)

        assert isinstance(row["content_with_weight"], str)
        parsed = json.loads(row["content_with_weight"])
        assert parsed == ["part1", "part2", "part3"]

    def test_convert_batch_empty(self):
        """Test batch conversion with empty list."""
        converter = RAGFlowDataConverter()

        rows = converter.convert_batch([])

        assert rows == []

    def test_existing_extra_field_merged(self):
        """Test that existing extra field is merged with unknown fields."""
        converter = RAGFlowDataConverter()

        es_doc = {
            "_id": "merge_extra",
            "_source": {
                "id": "merge_extra",
                "kb_id": "kb_001",
                "extra": {"existing_key": "existing_value"},
                "custom_field": "custom_value",
            }
        }

        row = converter.convert_document(es_doc)

        # extra should contain both existing and new fields
        extra = json.loads(row["extra"])
        assert "custom_field" in extra
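The exact `VECTOR_FIELD_PATTERN` regex lives in `es_ob_migration/schema.py`; an equivalent form inferred from the pattern tests above (names of the form `q_<digits>_vec`, dimension captured in a `vector_size` named group) would be:

```python
import re

# Inferred equivalent of VECTOR_FIELD_PATTERN; the shipped regex may differ in detail.
vector_field_pattern = re.compile(r"^q_(?P<vector_size>\d+)_vec$")

assert vector_field_pattern.match("q_1536_vec").group("vector_size") == "1536"
assert vector_field_pattern.match("content_with_weight") is None
```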
385
tools/es-to-oceanbase-migration/tests/test_verify.py
Normal file
385
tools/es-to-oceanbase-migration/tests/test_verify.py
Normal file
@ -0,0 +1,385 @@
|
|||||||
|
"""
|
||||||
|
Tests for migration verification.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import Mock, MagicMock
|
||||||
|
|
||||||
|
from es_ob_migration.verify import MigrationVerifier, VerificationResult
|
||||||
|
|
||||||
|
|
||||||
|
class TestVerificationResult:
|
||||||
|
"""Test VerificationResult dataclass."""
|
||||||
|
|
||||||
|
def test_create_basic_result(self):
|
||||||
|
"""Test creating a basic result."""
|
||||||
|
result = VerificationResult(
|
||||||
|
es_index="ragflow_test",
|
||||||
|
ob_table="ragflow_test",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result.es_index == "ragflow_test"
|
||||||
|
assert result.ob_table == "ragflow_test"
|
||||||
|
assert result.es_count == 0
|
||||||
|
assert result.ob_count == 0
|
||||||
|
assert result.passed is False
|
||||||
|
|
||||||
|
def test_result_default_values(self):
|
||||||
|
"""Test default values."""
|
||||||
|
result = VerificationResult(
|
||||||
|
es_index="test",
|
||||||
|
ob_table="test",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result.count_match is False
|
||||||
|
assert result.count_diff == 0
|
||||||
|
assert result.sample_size == 0
|
||||||
|
assert result.samples_verified == 0
|
||||||
|
assert result.samples_matched == 0
|
||||||
|
assert result.sample_match_rate == 0.0
|
||||||
|
assert result.missing_in_ob == []
|
||||||
|
assert result.data_mismatches == []
|
||||||
|
assert result.message == ""
|
||||||
|
|
||||||
|
def test_result_with_counts(self):
|
||||||
|
"""Test result with count data."""
|
||||||
|
result = VerificationResult(
|
||||||
|
es_index="test",
|
||||||
|
ob_table="test",
|
||||||
|
es_count=1000,
|
||||||
|
ob_count=1000,
|
||||||
|
count_match=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result.es_count == 1000
|
||||||
|
assert result.ob_count == 1000
|
||||||
|
assert result.count_match is True
|
||||||
|
|
||||||
|
|

class TestMigrationVerifier:
    """Test MigrationVerifier class."""

    @pytest.fixture
    def mock_es_client(self):
        """Create mock ES client."""
        client = Mock()
        client.count_documents = Mock(return_value=100)
        client.get_sample_documents = Mock(return_value=[])
        return client

    @pytest.fixture
    def mock_ob_client(self):
        """Create mock OB client."""
        client = Mock()
        client.count_rows = Mock(return_value=100)
        client.get_row_by_id = Mock(return_value=None)
        return client

    @pytest.fixture
    def verifier(self, mock_es_client, mock_ob_client):
        """Create verifier with mock clients."""
        return MigrationVerifier(mock_es_client, mock_ob_client)

    def test_verify_counts_match(self, mock_es_client, mock_ob_client):
        """Test verification when counts match."""
        mock_es_client.count_documents.return_value = 1000
        mock_ob_client.count_rows.return_value = 1000
        mock_es_client.get_sample_documents.return_value = []

        verifier = MigrationVerifier(mock_es_client, mock_ob_client)
        result = verifier.verify("ragflow_test", "ragflow_test", sample_size=0)

        assert result.es_count == 1000
        assert result.ob_count == 1000
        assert result.count_match is True
        assert result.count_diff == 0

    def test_verify_counts_mismatch(self, mock_es_client, mock_ob_client):
        """Test verification when counts don't match."""
        mock_es_client.count_documents.return_value = 1000
        mock_ob_client.count_rows.return_value = 950
        mock_es_client.get_sample_documents.return_value = []

        verifier = MigrationVerifier(mock_es_client, mock_ob_client)
        result = verifier.verify("ragflow_test", "ragflow_test", sample_size=0)

        assert result.es_count == 1000
        assert result.ob_count == 950
        assert result.count_match is False
        assert result.count_diff == 50

    def test_verify_samples_all_match(self, mock_es_client, mock_ob_client):
        """Test sample verification when all samples match."""
        # Setup ES samples
        es_samples = [
            {"_id": f"doc_{i}", "id": f"doc_{i}", "kb_id": "kb_001", "content_with_weight": f"content_{i}"}
            for i in range(10)
        ]
        mock_es_client.count_documents.return_value = 100
        mock_es_client.get_sample_documents.return_value = es_samples

        # Setup OB to return matching documents
        def get_row(table, doc_id):
            return {"id": doc_id, "kb_id": "kb_001", "content_with_weight": f"content_{doc_id.split('_')[1]}"}

        mock_ob_client.count_rows.return_value = 100
        mock_ob_client.get_row_by_id.side_effect = get_row

        verifier = MigrationVerifier(mock_es_client, mock_ob_client)
        result = verifier.verify("ragflow_test", "ragflow_test", sample_size=10)

        assert result.samples_verified == 10
        assert result.samples_matched == 10
        assert result.sample_match_rate == 1.0

    def test_verify_samples_some_missing(self, mock_es_client, mock_ob_client):
        """Test sample verification when some documents are missing."""
        es_samples = [
            {"_id": f"doc_{i}", "id": f"doc_{i}", "kb_id": "kb_001"}
            for i in range(10)
        ]
        mock_es_client.count_documents.return_value = 100
        mock_es_client.get_sample_documents.return_value = es_samples

        # Only return some documents
        def get_row(table, doc_id):
            idx = int(doc_id.split("_")[1])
            if idx < 7:  # Only return first 7
                return {"id": doc_id, "kb_id": "kb_001"}
            return None

        mock_ob_client.count_rows.return_value = 100
        mock_ob_client.get_row_by_id.side_effect = get_row

        verifier = MigrationVerifier(mock_es_client, mock_ob_client)
        result = verifier.verify("ragflow_test", "ragflow_test", sample_size=10)

        assert result.samples_verified == 10
        assert result.samples_matched == 7
        assert len(result.missing_in_ob) == 3

    def test_verify_samples_data_mismatch(self, mock_es_client, mock_ob_client):
        """Test sample verification when data doesn't match."""
        es_samples = [
            {"_id": "doc_1", "id": "doc_1", "kb_id": "kb_001", "available_int": 1}
        ]
        mock_es_client.count_documents.return_value = 100
        mock_es_client.get_sample_documents.return_value = es_samples

        # Return document with different data
        mock_ob_client.count_rows.return_value = 100
        mock_ob_client.get_row_by_id.return_value = {
            "id": "doc_1", "kb_id": "kb_002", "available_int": 0  # Different values
        }

        verifier = MigrationVerifier(mock_es_client, mock_ob_client)
        result = verifier.verify("ragflow_test", "ragflow_test", sample_size=1)

        assert result.samples_verified == 1
        assert result.samples_matched == 0
        assert len(result.data_mismatches) == 1

    def test_values_equal_none_values(self, verifier):
        """Test value comparison with None values."""
        assert verifier._values_equal("field", None, None) is True
        assert verifier._values_equal("field", "value", None) is False
        assert verifier._values_equal("field", None, "value") is False

    def test_values_equal_array_columns(self, verifier):
        """Test value comparison for array columns."""
        # Array stored as JSON string in OB
        assert verifier._values_equal(
            "important_kwd",
            ["key1", "key2"],
            '["key1", "key2"]'
        ) is True

        # Order shouldn't matter for arrays
        assert verifier._values_equal(
            "important_kwd",
            ["key2", "key1"],
            '["key1", "key2"]'
        ) is True

    def test_values_equal_json_columns(self, verifier):
        """Test value comparison for JSON columns."""
        assert verifier._values_equal(
            "metadata",
            {"author": "John"},
            '{"author": "John"}'
        ) is True

    def test_values_equal_kb_id_list(self, verifier):
        """Test kb_id comparison when ES has list."""
        # ES sometimes stores kb_id as list
        assert verifier._values_equal(
            "kb_id",
            ["kb_001", "kb_002"],
            "kb_001"
        ) is True

    def test_values_equal_content_with_weight_dict(self, verifier):
        """Test content_with_weight comparison when OB has JSON string."""
        assert verifier._values_equal(
            "content_with_weight",
            {"text": "content", "weight": 1.0},
            '{"text": "content", "weight": 1.0}'
        ) is True

    def test_determine_result_passed(self, mock_es_client, mock_ob_client):
        """Test result determination for passed verification."""
        mock_es_client.count_documents.return_value = 1000
        mock_ob_client.count_rows.return_value = 1000

        es_samples = [{"_id": f"doc_{i}", "id": f"doc_{i}", "kb_id": "kb_001"} for i in range(100)]
        mock_es_client.get_sample_documents.return_value = es_samples
        mock_ob_client.get_row_by_id.side_effect = lambda t, d: {"id": d, "kb_id": "kb_001"}

        verifier = MigrationVerifier(mock_es_client, mock_ob_client)
        result = verifier.verify("test", "test", sample_size=100)

        assert result.passed is True
        assert "PASSED" in result.message

    def test_determine_result_failed_count(self, mock_es_client, mock_ob_client):
        """Test result determination when count verification fails."""
        mock_es_client.count_documents.return_value = 1000
        mock_ob_client.count_rows.return_value = 500  # Big difference
        mock_es_client.get_sample_documents.return_value = []

        verifier = MigrationVerifier(mock_es_client, mock_ob_client)
        result = verifier.verify("test", "test", sample_size=0)

        assert result.passed is False
        assert "FAILED" in result.message

    def test_determine_result_failed_samples(self, mock_es_client, mock_ob_client):
        """Test result determination when sample verification fails."""
        mock_es_client.count_documents.return_value = 100
        mock_ob_client.count_rows.return_value = 100

        es_samples = [{"_id": f"doc_{i}", "id": f"doc_{i}"} for i in range(10)]
        mock_es_client.get_sample_documents.return_value = es_samples
        mock_ob_client.get_row_by_id.return_value = None  # All missing

        verifier = MigrationVerifier(mock_es_client, mock_ob_client)
        result = verifier.verify("test", "test", sample_size=10)

        assert result.passed is False

    def test_generate_report(self, verifier):
        """Test report generation."""
        result = VerificationResult(
            es_index="ragflow_test",
            ob_table="ragflow_test",
            es_count=1000,
            ob_count=1000,
            count_match=True,
            count_diff=0,
            sample_size=100,
            samples_verified=100,
            samples_matched=100,
            sample_match_rate=1.0,
            passed=True,
            message="Verification PASSED",
        )

        report = verifier.generate_report(result)

        assert "ragflow_test" in report
        assert "1,000" in report
        assert "PASSED" in report
        assert "100.00%" in report

    def test_generate_report_with_missing(self, verifier):
        """Test report generation with missing documents."""
        result = VerificationResult(
            es_index="test",
            ob_table="test",
            es_count=100,
            ob_count=95,
            count_match=False,
            count_diff=5,
            sample_size=10,
            samples_verified=10,
            samples_matched=8,
            sample_match_rate=0.8,
            missing_in_ob=["doc_1", "doc_2"],
            passed=False,
            message="Verification FAILED",
        )

        report = verifier.generate_report(result)

        assert "Missing in OceanBase" in report
        assert "doc_1" in report
        assert "FAILED" in report

    def test_generate_report_with_mismatches(self, verifier):
        """Test report generation with data mismatches."""
        result = VerificationResult(
            es_index="test",
            ob_table="test",
            es_count=100,
            ob_count=100,
            count_match=True,
            sample_size=10,
            samples_verified=10,
            samples_matched=8,
            sample_match_rate=0.8,
            data_mismatches=[
                {
                    "id": "doc_1",
                    "differences": [
                        {"field": "kb_id", "es_value": "kb_001", "ob_value": "kb_002"}
                    ]
                }
            ],
            passed=False,
            message="Verification FAILED",
        )

        report = verifier.generate_report(result)

        assert "Data Mismatches" in report
        assert "doc_1" in report
        assert "kb_id" in report

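Taken together, the `TestMigrationVerifier` cases above imply a two-stage flow: a row-count comparison followed by optional per-document sampling, with the result then reduced to a PASSED/FAILED message. A rough sketch of that flow, reusing the `VerificationResult` sketched earlier and the client method names stubbed by the mocks (`count_documents`, `count_rows`, `get_sample_documents`, `get_row_by_id`); the sampling call signature and the pass criteria here are assumptions, and the real code compares fields via `_values_equal` rather than plain equality (see `TestValueComparison` below):

```python
def verify_sketch(es_client, ob_client, es_index, ob_table, sample_size=0):
    # Stage 1: compare total counts.
    es_count = es_client.count_documents(es_index)
    ob_count = ob_client.count_rows(ob_table)
    result = VerificationResult(
        es_index=es_index,
        ob_table=ob_table,
        es_count=es_count,
        ob_count=ob_count,
        count_match=(es_count == ob_count),
        count_diff=abs(es_count - ob_count),
    )

    # Stage 2: spot-check a sample of documents against OceanBase rows.
    if sample_size > 0:
        # Assumed signature; the tests only stub the return value.
        samples = es_client.get_sample_documents(es_index, sample_size)
        result.sample_size = sample_size
        for doc in samples:
            result.samples_verified += 1
            row = ob_client.get_row_by_id(ob_table, doc["_id"])
            if row is None:
                result.missing_in_ob.append(doc["_id"])
                continue
            # The real verifier compares field-by-field via _values_equal.
            if all(row.get(k) == v for k, v in doc.items() if k != "_id"):
                result.samples_matched += 1
            else:
                result.data_mismatches.append({"id": doc["_id"]})
        result.sample_match_rate = result.samples_matched / max(result.samples_verified, 1)

    result.passed = result.count_match and (
        result.sample_size == 0 or result.sample_match_rate == 1.0
    )
    result.message = "Verification PASSED" if result.passed else "Verification FAILED"
    return result
```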

class TestValueComparison:
    """Test value comparison edge cases."""

    @pytest.fixture
    def verifier(self):
        """Create verifier with mock clients."""
        return MigrationVerifier(Mock(), Mock())

    def test_string_comparison(self, verifier):
        """Test string comparison."""
        assert verifier._values_equal("field", "value", "value") is True
        assert verifier._values_equal("field", "value1", "value2") is False

    def test_integer_comparison(self, verifier):
        """Test integer comparison (converted to string)."""
        assert verifier._values_equal("field", 123, "123") is True
        assert verifier._values_equal("field", "123", 123) is True

    def test_float_comparison(self, verifier):
        """Test float comparison."""
        assert verifier._values_equal("field", 1.5, "1.5") is True

    def test_boolean_comparison(self, verifier):
        """Test boolean comparison."""
        assert verifier._values_equal("field", True, "True") is True
        assert verifier._values_equal("field", False, "False") is True

    def test_empty_array_comparison(self, verifier):
        """Test empty array comparison."""
        assert verifier._values_equal("important_kwd", [], "[]") is True

    def test_nested_json_comparison(self, verifier):
        """Test nested JSON comparison."""
        es_value = {"nested": {"key": "value"}}
        ob_value = '{"nested": {"key": "value"}}'
        assert verifier._values_equal("metadata", es_value, ob_value) is True
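The comparison cases above (and the `_values_equal` tests earlier in the file) suggest how ES-native values are normalised against the text/JSON forms read back from OceanBase. A sketch consistent with those cases, not the module's actual implementation:

```python
import json


def values_equal_sketch(field_name, es_value, ob_value) -> bool:
    # None only matches None.
    if es_value is None or ob_value is None:
        return es_value is None and ob_value is None

    # kb_id may be a list on the ES side but a single id in OceanBase.
    if field_name == "kb_id" and isinstance(es_value, list):
        return ob_value in es_value

    # Lists and dicts are stored as JSON text in OceanBase.
    if isinstance(es_value, (list, dict)):
        try:
            parsed = json.loads(ob_value) if isinstance(ob_value, str) else ob_value
        except (TypeError, ValueError):
            return False
        if isinstance(es_value, list) and isinstance(parsed, list):
            # Order-insensitive comparison for keyword arrays.
            return sorted(map(str, es_value)) == sorted(map(str, parsed))
        return es_value == parsed

    # Scalars (int, float, bool) come back as strings; compare their string form.
    return str(es_value) == str(ob_value)
```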
tools/es-to-oceanbase-migration/uv.lock (generated, new file, 960 lines)
@@ -0,0 +1,960 @@
version = 1
revision = 3
requires-python = ">=3.10"
resolution-markers = [
    "python_full_version >= '3.11'",
    "python_full_version < '3.11'",
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "aiomysql"
|
||||||
|
version = "0.3.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "pymysql" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/29/e0/302aeffe8d90853556f47f3106b89c16cc2ec2a4d269bdfd82e3f4ae12cc/aiomysql-0.3.2.tar.gz", hash = "sha256:72d15ef5cfc34c03468eb41e1b90adb9fd9347b0b589114bd23ead569a02ac1a", size = 108311, upload-time = "2025-10-22T00:15:21.278Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4c/af/aae0153c3e28712adaf462328f6c7a3c196a1c1c27b491de4377dd3e6b52/aiomysql-0.3.2-py3-none-any.whl", hash = "sha256:c82c5ba04137d7afd5c693a258bea8ead2aad77101668044143a991e04632eb2", size = 71834, upload-time = "2025-10-22T00:15:15.905Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "annotated-types"
|
||||||
|
version = "0.7.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anyio"
|
||||||
|
version = "4.12.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
|
||||||
|
{ name = "idna" },
|
||||||
|
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "backports-asyncio-runner"
|
||||||
|
version = "1.2.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "certifi"
|
||||||
|
version = "2026.1.4"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "click"
|
||||||
|
version = "8.3.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "colorama"
|
||||||
|
version = "0.4.6"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "coverage"
|
||||||
|
version = "7.13.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ad/49/349848445b0e53660e258acbcc9b0d014895b6739237920886672240f84b/coverage-7.13.2.tar.gz", hash = "sha256:044c6951ec37146b72a50cc81ef02217d27d4c3640efd2640311393cbbf143d3", size = 826523, upload-time = "2026-01-25T13:00:04.889Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a4/2d/63e37369c8e81a643afe54f76073b020f7b97ddbe698c5c944b51b0a2bc5/coverage-7.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4af3b01763909f477ea17c962e2cca8f39b350a4e46e3a30838b2c12e31b81b", size = 218842, upload-time = "2026-01-25T12:57:15.3Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/57/06/86ce882a8d58cbcb3030e298788988e618da35420d16a8c66dac34f138d0/coverage-7.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36393bd2841fa0b59498f75466ee9bdec4f770d3254f031f23e8fd8e140ffdd2", size = 219360, upload-time = "2026-01-25T12:57:17.572Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cd/84/70b0eb1ee19ca4ef559c559054c59e5b2ae4ec9af61398670189e5d276e9/coverage-7.13.2-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9cc7573518b7e2186bd229b1a0fe24a807273798832c27032c4510f47ffdb896", size = 246123, upload-time = "2026-01-25T12:57:19.087Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/35/fb/05b9830c2e8275ebc031e0019387cda99113e62bb500ab328bb72578183b/coverage-7.13.2-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ca9566769b69a5e216a4e176d54b9df88f29d750c5b78dbb899e379b4e14b30c", size = 247930, upload-time = "2026-01-25T12:57:20.929Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/81/aa/3f37858ca2eed4f09b10ca3c6ddc9041be0a475626cd7fd2712f4a2d526f/coverage-7.13.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c9bdea644e94fd66d75a6f7e9a97bb822371e1fe7eadae2cacd50fcbc28e4dc", size = 249804, upload-time = "2026-01-25T12:57:22.904Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b6/b3/c904f40c56e60a2d9678a5ee8df3d906d297d15fb8bec5756c3b0a67e2df/coverage-7.13.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5bd447332ec4f45838c1ad42268ce21ca87c40deb86eabd59888859b66be22a5", size = 246815, upload-time = "2026-01-25T12:57:24.314Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/41/91/ddc1c5394ca7fd086342486440bfdd6b9e9bda512bf774599c7c7a0081e0/coverage-7.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7c79ad5c28a16a1277e1187cf83ea8dafdcc689a784228a7d390f19776db7c31", size = 247843, upload-time = "2026-01-25T12:57:26.544Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/87/d2/cdff8f4cd33697883c224ea8e003e9c77c0f1a837dc41d95a94dd26aad67/coverage-7.13.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:76e06ccacd1fb6ada5d076ed98a8c6f66e2e6acd3df02819e2ee29fd637b76ad", size = 245850, upload-time = "2026-01-25T12:57:28.507Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f5/42/e837febb7866bf2553ab53dd62ed52f9bb36d60c7e017c55376ad21fbb05/coverage-7.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:49d49e9a5e9f4dc3d3dac95278a020afa6d6bdd41f63608a76fa05a719d5b66f", size = 246116, upload-time = "2026-01-25T12:57:30.16Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/09/b1/4a3f935d7df154df02ff4f71af8d61298d713a7ba305d050ae475bfbdde2/coverage-7.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed2bce0e7bfa53f7b0b01c722da289ef6ad4c18ebd52b1f93704c21f116360c8", size = 246720, upload-time = "2026-01-25T12:57:32.165Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e1/fe/538a6fd44c515f1c5197a3f078094cbaf2ce9f945df5b44e29d95c864bff/coverage-7.13.2-cp310-cp310-win32.whl", hash = "sha256:1574983178b35b9af4db4a9f7328a18a14a0a0ce76ffaa1c1bacb4cc82089a7c", size = 221465, upload-time = "2026-01-25T12:57:33.511Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5e/09/4b63a024295f326ec1a40ec8def27799300ce8775b1cbf0d33b1790605c4/coverage-7.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:a360a8baeb038928ceb996f5623a4cd508728f8f13e08d4e96ce161702f3dd99", size = 222397, upload-time = "2026-01-25T12:57:34.927Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6c/01/abca50583a8975bb6e1c59eff67ed8e48bb127c07dad5c28d9e96ccc09ec/coverage-7.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:060ebf6f2c51aff5ba38e1f43a2095e087389b1c69d559fde6049a4b0001320e", size = 218971, upload-time = "2026-01-25T12:57:36.953Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/eb/0e/b6489f344d99cd1e5b4d5e1be52dfd3f8a3dc5112aa6c33948da8cabad4e/coverage-7.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1ea8ca9db5e7469cd364552985e15911548ea5b69c48a17291f0cac70484b2e", size = 219473, upload-time = "2026-01-25T12:57:38.934Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/17/11/db2f414915a8e4ec53f60b17956c27f21fb68fcf20f8a455ce7c2ccec638/coverage-7.13.2-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b780090d15fd58f07cf2011943e25a5f0c1c894384b13a216b6c86c8a8a7c508", size = 249896, upload-time = "2026-01-25T12:57:40.365Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/80/06/0823fe93913663c017e508e8810c998c8ebd3ec2a5a85d2c3754297bdede/coverage-7.13.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:88a800258d83acb803c38175b4495d293656d5fac48659c953c18e5f539a274b", size = 251810, upload-time = "2026-01-25T12:57:42.045Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/61/dc/b151c3cc41b28cdf7f0166c5fa1271cbc305a8ec0124cce4b04f74791a18/coverage-7.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6326e18e9a553e674d948536a04a80d850a5eeefe2aae2e6d7cf05d54046c01b", size = 253920, upload-time = "2026-01-25T12:57:44.026Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2d/35/e83de0556e54a4729a2b94ea816f74ce08732e81945024adee46851c2264/coverage-7.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:59562de3f797979e1ff07c587e2ac36ba60ca59d16c211eceaa579c266c5022f", size = 250025, upload-time = "2026-01-25T12:57:45.624Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/39/67/af2eb9c3926ce3ea0d58a0d2516fcbdacf7a9fc9559fe63076beaf3f2596/coverage-7.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27ba1ed6f66b0e2d61bfa78874dffd4f8c3a12f8e2b5410e515ab345ba7bc9c3", size = 251612, upload-time = "2026-01-25T12:57:47.713Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/26/62/5be2e25f3d6c711d23b71296f8b44c978d4c8b4e5b26871abfc164297502/coverage-7.13.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8be48da4d47cc68754ce643ea50b3234557cbefe47c2f120495e7bd0a2756f2b", size = 249670, upload-time = "2026-01-25T12:57:49.378Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b3/51/400d1b09a8344199f9b6a6fc1868005d766b7ea95e7882e494fa862ca69c/coverage-7.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2a47a4223d3361b91176aedd9d4e05844ca67d7188456227b6bf5e436630c9a1", size = 249395, upload-time = "2026-01-25T12:57:50.86Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e0/36/f02234bc6e5230e2f0a63fd125d0a2093c73ef20fdf681c7af62a140e4e7/coverage-7.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c6f141b468740197d6bd38f2b26ade124363228cc3f9858bd9924ab059e00059", size = 250298, upload-time = "2026-01-25T12:57:52.287Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b0/06/713110d3dd3151b93611c9cbfc65c15b4156b44f927fced49ac0b20b32a4/coverage-7.13.2-cp311-cp311-win32.whl", hash = "sha256:89567798404af067604246e01a49ef907d112edf2b75ef814b1364d5ce267031", size = 221485, upload-time = "2026-01-25T12:57:53.876Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/16/0c/3ae6255fa1ebcb7dec19c9a59e85ef5f34566d1265c70af5b2fc981da834/coverage-7.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:21dd57941804ae2ac7e921771a5e21bbf9aabec317a041d164853ad0a96ce31e", size = 222421, upload-time = "2026-01-25T12:57:55.433Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b5/37/fabc3179af4d61d89ea47bd04333fec735cd5e8b59baad44fed9fc4170d7/coverage-7.13.2-cp311-cp311-win_arm64.whl", hash = "sha256:10758e0586c134a0bafa28f2d37dd2cdb5e4a90de25c0fc0c77dabbad46eca28", size = 221088, upload-time = "2026-01-25T12:57:57.41Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/46/39/e92a35f7800222d3f7b2cbb7bbc3b65672ae8d501cb31801b2d2bd7acdf1/coverage-7.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f106b2af193f965d0d3234f3f83fc35278c7fb935dfbde56ae2da3dd2c03b84d", size = 219142, upload-time = "2026-01-25T12:58:00.448Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/45/7a/8bf9e9309c4c996e65c52a7c5a112707ecdd9fbaf49e10b5a705a402bbb4/coverage-7.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f45d21dc4d5d6bd29323f0320089ef7eae16e4bef712dff79d184fa7330af3", size = 219503, upload-time = "2026-01-25T12:58:02.451Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/87/93/17661e06b7b37580923f3f12406ac91d78aeed293fb6da0b69cc7957582f/coverage-7.13.2-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:fae91dfecd816444c74531a9c3d6ded17a504767e97aa674d44f638107265b99", size = 251006, upload-time = "2026-01-25T12:58:04.059Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/12/f0/f9e59fb8c310171497f379e25db060abef9fa605e09d63157eebec102676/coverage-7.13.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:264657171406c114787b441484de620e03d8f7202f113d62fcd3d9688baa3e6f", size = 253750, upload-time = "2026-01-25T12:58:05.574Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e5/b1/1935e31add2232663cf7edd8269548b122a7d100047ff93475dbaaae673e/coverage-7.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae47d8dcd3ded0155afbb59c62bd8ab07ea0fd4902e1c40567439e6db9dcaf2f", size = 254862, upload-time = "2026-01-25T12:58:07.647Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/af/59/b5e97071ec13df5f45da2b3391b6cdbec78ba20757bc92580a5b3d5fa53c/coverage-7.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a0b33e9fd838220b007ce8f299114d406c1e8edb21336af4c97a26ecfd185aa", size = 251420, upload-time = "2026-01-25T12:58:09.309Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3f/75/9495932f87469d013dc515fb0ce1aac5fa97766f38f6b1a1deb1ee7b7f3a/coverage-7.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b3becbea7f3ce9a2d4d430f223ec15888e4deb31395840a79e916368d6004cce", size = 252786, upload-time = "2026-01-25T12:58:10.909Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6a/59/af550721f0eb62f46f7b8cb7e6f1860592189267b1c411a4e3a057caacee/coverage-7.13.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f819c727a6e6eeb8711e4ce63d78c620f69630a2e9d53bc95ca5379f57b6ba94", size = 250928, upload-time = "2026-01-25T12:58:12.449Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9b/b1/21b4445709aae500be4ab43bbcfb4e53dc0811c3396dcb11bf9f23fd0226/coverage-7.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:4f7b71757a3ab19f7ba286e04c181004c1d61be921795ee8ba6970fd0ec91da5", size = 250496, upload-time = "2026-01-25T12:58:14.047Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ba/b1/0f5d89dfe0392990e4f3980adbde3eb34885bc1effb2dc369e0bf385e389/coverage-7.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b7fc50d2afd2e6b4f6f2f403b70103d280a8e0cb35320cbbe6debcda02a1030b", size = 252373, upload-time = "2026-01-25T12:58:15.976Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/01/c9/0cf1a6a57a9968cc049a6b896693faa523c638a5314b1fc374eb2b2ac904/coverage-7.13.2-cp312-cp312-win32.whl", hash = "sha256:292250282cf9bcf206b543d7608bda17ca6fc151f4cbae949fc7e115112fbd41", size = 221696, upload-time = "2026-01-25T12:58:17.517Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4d/05/d7540bf983f09d32803911afed135524570f8c47bb394bf6206c1dc3a786/coverage-7.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:eeea10169fac01549a7921d27a3e517194ae254b542102267bef7a93ed38c40e", size = 222504, upload-time = "2026-01-25T12:58:19.115Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/15/8b/1a9f037a736ced0a12aacf6330cdaad5008081142a7070bc58b0f7930cbc/coverage-7.13.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a5b567f0b635b592c917f96b9a9cb3dbd4c320d03f4bf94e9084e494f2e8894", size = 221120, upload-time = "2026-01-25T12:58:21.334Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a7/f0/3d3eac7568ab6096ff23791a526b0048a1ff3f49d0e236b2af6fb6558e88/coverage-7.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed75de7d1217cf3b99365d110975f83af0528c849ef5180a12fd91b5064df9d6", size = 219168, upload-time = "2026-01-25T12:58:23.376Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a3/a6/f8b5cfeddbab95fdef4dcd682d82e5dcff7a112ced57a959f89537ee9995/coverage-7.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97e596de8fa9bada4d88fde64a3f4d37f1b6131e4faa32bad7808abc79887ddc", size = 219537, upload-time = "2026-01-25T12:58:24.932Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7b/e6/8d8e6e0c516c838229d1e41cadcec91745f4b1031d4db17ce0043a0423b4/coverage-7.13.2-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:68c86173562ed4413345410c9480a8d64864ac5e54a5cda236748031e094229f", size = 250528, upload-time = "2026-01-25T12:58:26.567Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8e/78/befa6640f74092b86961f957f26504c8fba3d7da57cc2ab7407391870495/coverage-7.13.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7be4d613638d678b2b3773b8f687537b284d7074695a43fe2fbbfc0e31ceaed1", size = 253132, upload-time = "2026-01-25T12:58:28.251Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9d/10/1630db1edd8ce675124a2ee0f7becc603d2bb7b345c2387b4b95c6907094/coverage-7.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d7f63ce526a96acd0e16c4af8b50b64334239550402fb1607ce6a584a6d62ce9", size = 254374, upload-time = "2026-01-25T12:58:30.294Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ed/1d/0d9381647b1e8e6d310ac4140be9c428a0277330991e0c35bdd751e338a4/coverage-7.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:406821f37f864f968e29ac14c3fccae0fec9fdeba48327f0341decf4daf92d7c", size = 250762, upload-time = "2026-01-25T12:58:32.036Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/43/e4/5636dfc9a7c871ee8776af83ee33b4c26bc508ad6cee1e89b6419a366582/coverage-7.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ee68e5a4e3e5443623406b905db447dceddffee0dceb39f4e0cd9ec2a35004b5", size = 252502, upload-time = "2026-01-25T12:58:33.961Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/02/2a/7ff2884d79d420cbb2d12fed6fff727b6d0ef27253140d3cdbbd03187ee0/coverage-7.13.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2ee0e58cca0c17dd9c6c1cdde02bb705c7b3fbfa5f3b0b5afeda20d4ebff8ef4", size = 250463, upload-time = "2026-01-25T12:58:35.529Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/91/c0/ba51087db645b6c7261570400fc62c89a16278763f36ba618dc8657a187b/coverage-7.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e5bbb5018bf76a56aabdb64246b5288d5ae1b7d0dd4d0534fe86df2c2992d1c", size = 250288, upload-time = "2026-01-25T12:58:37.226Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/03/07/44e6f428551c4d9faf63ebcefe49b30e5c89d1be96f6a3abd86a52da9d15/coverage-7.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a55516c68ef3e08e134e818d5e308ffa6b1337cc8b092b69b24287bf07d38e31", size = 252063, upload-time = "2026-01-25T12:58:38.821Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c2/67/35b730ad7e1859dd57e834d1bc06080d22d2f87457d53f692fce3f24a5a9/coverage-7.13.2-cp313-cp313-win32.whl", hash = "sha256:5b20211c47a8abf4abc3319d8ce2464864fa9f30c5fcaf958a3eed92f4f1fef8", size = 221716, upload-time = "2026-01-25T12:58:40.484Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0d/82/e5fcf5a97c72f45fc14829237a6550bf49d0ab882ac90e04b12a69db76b4/coverage-7.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:14f500232e521201cf031549fb1ebdfc0a40f401cf519157f76c397e586c3beb", size = 222522, upload-time = "2026-01-25T12:58:43.247Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/f1/25d7b2f946d239dd2d6644ca2cc060d24f97551e2af13b6c24c722ae5f97/coverage-7.13.2-cp313-cp313-win_arm64.whl", hash = "sha256:9779310cb5a9778a60c899f075a8514c89fa6d10131445c2207fc893e0b14557", size = 221145, upload-time = "2026-01-25T12:58:45Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9e/f7/080376c029c8f76fadfe43911d0daffa0cbdc9f9418a0eead70c56fb7f4b/coverage-7.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5a1e41ce5df6b547cbc3d3699381c9e2c2c369c67837e716ed0f549d48e", size = 219861, upload-time = "2026-01-25T12:58:46.586Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/42/11/0b5e315af5ab35f4c4a70e64d3314e4eec25eefc6dec13be3a7d5ffe8ac5/coverage-7.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b01899e82a04085b6561eb233fd688474f57455e8ad35cd82286463ba06332b7", size = 220207, upload-time = "2026-01-25T12:58:48.277Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b2/0c/0874d0318fb1062117acbef06a09cf8b63f3060c22265adaad24b36306b7/coverage-7.13.2-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:838943bea48be0e2768b0cf7819544cdedc1bbb2f28427eabb6eb8c9eb2285d3", size = 261504, upload-time = "2026-01-25T12:58:49.904Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/83/5e/1cd72c22ecb30751e43a72f40ba50fcef1b7e93e3ea823bd9feda8e51f9a/coverage-7.13.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:93d1d25ec2b27e90bcfef7012992d1f5121b51161b8bffcda756a816cf13c2c3", size = 263582, upload-time = "2026-01-25T12:58:51.582Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9b/da/8acf356707c7a42df4d0657020308e23e5a07397e81492640c186268497c/coverage-7.13.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93b57142f9621b0d12349c43fc7741fe578e4bc914c1e5a54142856cfc0bf421", size = 266008, upload-time = "2026-01-25T12:58:53.234Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/41/41/ea1730af99960309423c6ea8d6a4f1fa5564b2d97bd1d29dda4b42611f04/coverage-7.13.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f06799ae1bdfff7ccb8665d75f8291c69110ba9585253de254688aa8a1ccc6c5", size = 260762, upload-time = "2026-01-25T12:58:55.372Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/22/fa/02884d2080ba71db64fdc127b311db60e01fe6ba797d9c8363725e39f4d5/coverage-7.13.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f9405ab4f81d490811b1d91c7a20361135a2df4c170e7f0b747a794da5b7f23", size = 263571, upload-time = "2026-01-25T12:58:57.52Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d2/6b/4083aaaeba9b3112f55ac57c2ce7001dc4d8fa3fcc228a39f09cc84ede27/coverage-7.13.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f9ab1d5b86f8fbc97a5b3cd6280a3fd85fef3b028689d8a2c00918f0d82c728c", size = 261200, upload-time = "2026-01-25T12:58:59.255Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e9/d2/aea92fa36d61955e8c416ede9cf9bf142aa196f3aea214bb67f85235a050/coverage-7.13.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:f674f59712d67e841525b99e5e2b595250e39b529c3bda14764e4f625a3fa01f", size = 260095, upload-time = "2026-01-25T12:59:01.066Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0d/ae/04ffe96a80f107ea21b22b2367175c621da920063260a1c22f9452fd7866/coverage-7.13.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c6cadac7b8ace1ba9144feb1ae3cb787a6065ba6d23ffc59a934b16406c26573", size = 262284, upload-time = "2026-01-25T12:59:02.802Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1c/7a/6f354dcd7dfc41297791d6fb4e0d618acb55810bde2c1fd14b3939e05c2b/coverage-7.13.2-cp313-cp313t-win32.whl", hash = "sha256:14ae4146465f8e6e6253eba0cccd57423e598a4cb925958b240c805300918343", size = 222389, upload-time = "2026-01-25T12:59:04.563Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8d/d5/080ad292a4a3d3daf411574be0a1f56d6dee2c4fdf6b005342be9fac807f/coverage-7.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9074896edd705a05769e3de0eac0a8388484b503b68863dd06d5e473f874fd47", size = 223450, upload-time = "2026-01-25T12:59:06.677Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/88/96/df576fbacc522e9fb8d1c4b7a7fc62eb734be56e2cba1d88d2eabe08ea3f/coverage-7.13.2-cp313-cp313t-win_arm64.whl", hash = "sha256:69e526e14f3f854eda573d3cf40cffd29a1a91c684743d904c33dbdcd0e0f3e7", size = 221707, upload-time = "2026-01-25T12:59:08.363Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/55/53/1da9e51a0775634b04fcc11eb25c002fc58ee4f92ce2e8512f94ac5fc5bf/coverage-7.13.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:387a825f43d680e7310e6f325b2167dd093bc8ffd933b83e9aa0983cf6e0a2ef", size = 219213, upload-time = "2026-01-25T12:59:11.909Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/46/35/b3caac3ebbd10230fea5a33012b27d19e999a17c9285c4228b4b2e35b7da/coverage-7.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f0d7fea9d8e5d778cd5a9e8fc38308ad688f02040e883cdc13311ef2748cb40f", size = 219549, upload-time = "2026-01-25T12:59:13.638Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/76/9c/e1cf7def1bdc72c1907e60703983a588f9558434a2ff94615747bd73c192/coverage-7.13.2-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e080afb413be106c95c4ee96b4fffdc9e2fa56a8bbf90b5c0918e5c4449412f5", size = 250586, upload-time = "2026-01-25T12:59:15.808Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ba/49/f54ec02ed12be66c8d8897270505759e057b0c68564a65c429ccdd1f139e/coverage-7.13.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a7fc042ba3c7ce25b8a9f097eb0f32a5ce1ccdb639d9eec114e26def98e1f8a4", size = 253093, upload-time = "2026-01-25T12:59:17.491Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fb/5e/aaf86be3e181d907e23c0f61fccaeb38de8e6f6b47aed92bf57d8fc9c034/coverage-7.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0ba505e021557f7f8173ee8cd6b926373d8653e5ff7581ae2efce1b11ef4c27", size = 254446, upload-time = "2026-01-25T12:59:19.752Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/28/c8/a5fa01460e2d75b0c853b392080d6829d3ca8b5ab31e158fa0501bc7c708/coverage-7.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7de326f80e3451bd5cc7239ab46c73ddb658fe0b7649476bc7413572d36cd548", size = 250615, upload-time = "2026-01-25T12:59:21.928Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/86/0b/6d56315a55f7062bb66410732c24879ccb2ec527ab6630246de5fe45a1df/coverage-7.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:abaea04f1e7e34841d4a7b343904a3f59481f62f9df39e2cd399d69a187a9660", size = 252452, upload-time = "2026-01-25T12:59:23.592Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/30/19/9bc550363ebc6b0ea121977ee44d05ecd1e8bf79018b8444f1028701c563/coverage-7.13.2-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9f93959ee0c604bccd8e0697be21de0887b1f73efcc3aa73a3ec0fd13feace92", size = 250418, upload-time = "2026-01-25T12:59:25.392Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1f/53/580530a31ca2f0cc6f07a8f2ab5460785b02bb11bdf815d4c4d37a4c5169/coverage-7.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:13fe81ead04e34e105bf1b3c9f9cdf32ce31736ee5d90a8d2de02b9d3e1bcb82", size = 250231, upload-time = "2026-01-25T12:59:27.888Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e2/42/dd9093f919dc3088cb472893651884bd675e3df3d38a43f9053656dca9a2/coverage-7.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d6d16b0f71120e365741bca2cb473ca6fe38930bc5431c5e850ba949f708f892", size = 251888, upload-time = "2026-01-25T12:59:29.636Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fa/a6/0af4053e6e819774626e133c3d6f70fae4d44884bfc4b126cb647baee8d3/coverage-7.13.2-cp314-cp314-win32.whl", hash = "sha256:9b2f4714bb7d99ba3790ee095b3b4ac94767e1347fe424278a0b10acb3ff04fe", size = 221968, upload-time = "2026-01-25T12:59:31.424Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c4/cc/5aff1e1f80d55862442855517bb8ad8ad3a68639441ff6287dde6a58558b/coverage-7.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:e4121a90823a063d717a96e0a0529c727fb31ea889369a0ee3ec00ed99bf6859", size = 222783, upload-time = "2026-01-25T12:59:33.118Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/de/20/09abafb24f84b3292cc658728803416c15b79f9ee5e68d25238a895b07d9/coverage-7.13.2-cp314-cp314-win_arm64.whl", hash = "sha256:6873f0271b4a15a33e7590f338d823f6f66f91ed147a03938d7ce26efd04eee6", size = 221348, upload-time = "2026-01-25T12:59:34.939Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b6/60/a3820c7232db63be060e4019017cd3426751c2699dab3c62819cdbcea387/coverage-7.13.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f61d349f5b7cd95c34017f1927ee379bfbe9884300d74e07cf630ccf7a610c1b", size = 219950, upload-time = "2026-01-25T12:59:36.624Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fd/37/e4ef5975fdeb86b1e56db9a82f41b032e3d93a840ebaf4064f39e770d5c5/coverage-7.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a43d34ce714f4ca674c0d90beb760eb05aad906f2c47580ccee9da8fe8bfb417", size = 220209, upload-time = "2026-01-25T12:59:38.339Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/54/df/d40e091d00c51adca1e251d3b60a8b464112efa3004949e96a74d7c19a64/coverage-7.13.2-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bff1b04cb9d4900ce5c56c4942f047dc7efe57e2608cb7c3c8936e9970ccdbee", size = 261576, upload-time = "2026-01-25T12:59:40.446Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c5/44/5259c4bed54e3392e5c176121af9f71919d96dde853386e7730e705f3520/coverage-7.13.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6ae99e4560963ad8e163e819e5d77d413d331fd00566c1e0856aa252303552c1", size = 263704, upload-time = "2026-01-25T12:59:42.346Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/16/bd/ae9f005827abcbe2c70157459ae86053971c9fa14617b63903abbdce26d9/coverage-7.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e79a8c7d461820257d9aa43716c4efc55366d7b292e46b5b37165be1d377405d", size = 266109, upload-time = "2026-01-25T12:59:44.073Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a2/c0/8e279c1c0f5b1eaa3ad9b0fb7a5637fc0379ea7d85a781c0fe0bb3cfc2ab/coverage-7.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:060ee84f6a769d40c492711911a76811b4befb6fba50abb450371abb720f5bd6", size = 260686, upload-time = "2026-01-25T12:59:45.804Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b2/47/3a8112627e9d863e7cddd72894171c929e94491a597811725befdcd76bce/coverage-7.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bca209d001fd03ea2d978f8a4985093240a355c93078aee3f799852c23f561a", size = 263568, upload-time = "2026-01-25T12:59:47.929Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/92/bc/7ea367d84afa3120afc3ce6de294fd2dcd33b51e2e7fbe4bbfd200f2cb8c/coverage-7.13.2-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:6b8092aa38d72f091db61ef83cb66076f18f02da3e1a75039a4f218629600e04", size = 261174, upload-time = "2026-01-25T12:59:49.717Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/33/b7/f1092dcecb6637e31cc2db099581ee5c61a17647849bae6b8261a2b78430/coverage-7.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:4a3158dc2dcce5200d91ec28cd315c999eebff355437d2765840555d765a6e5f", size = 260017, upload-time = "2026-01-25T12:59:51.463Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2b/cd/f3d07d4b95fbe1a2ef0958c15da614f7e4f557720132de34d2dc3aa7e911/coverage-7.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3973f353b2d70bd9796cc12f532a05945232ccae966456c8ed7034cb96bbfd6f", size = 262337, upload-time = "2026-01-25T12:59:53.407Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e0/db/b0d5b2873a07cb1e06a55d998697c0a5a540dcefbf353774c99eb3874513/coverage-7.13.2-cp314-cp314t-win32.whl", hash = "sha256:79f6506a678a59d4ded048dc72f1859ebede8ec2b9a2d509ebe161f01c2879d3", size = 222749, upload-time = "2026-01-25T12:59:56.316Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e5/2f/838a5394c082ac57d85f57f6aba53093b30d9089781df72412126505716f/coverage-7.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:196bfeabdccc5a020a57d5a368c681e3a6ceb0447d153aeccc1ab4d70a5032ba", size = 223857, upload-time = "2026-01-25T12:59:58.201Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/44/d4/b608243e76ead3a4298824b50922b89ef793e50069ce30316a65c1b4d7ef/coverage-7.13.2-cp314-cp314t-win_arm64.whl", hash = "sha256:69269ab58783e090bfbf5b916ab3d188126e22d6070bbfc93098fdd474ef937c", size = 221881, upload-time = "2026-01-25T13:00:00.449Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d2/db/d291e30fdf7ea617a335531e72294e0c723356d7fdde8fba00610a76bda9/coverage-7.13.2-py3-none-any.whl", hash = "sha256:40ce1ea1e25125556d8e76bd0b61500839a07944cc287ac21d5626f3e620cad5", size = 210943, upload-time = "2026-01-25T13:00:02.388Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.optional-dependencies]
|
||||||
|
toml = [
|
||||||
|
{ name = "tomli", marker = "python_full_version <= '3.11'" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "elastic-transport"
|
||||||
|
version = "9.2.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "certifi" },
|
||||||
|
{ name = "sniffio" },
|
||||||
|
{ name = "urllib3" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/23/0a/a92140b666afdcb9862a16e4d80873b3c887c1b7e3f17e945fc3460edf1b/elastic_transport-9.2.1.tar.gz", hash = "sha256:97d9abd638ba8aa90faa4ca1bf1a18bde0fe2088fbc8757f2eb7b299f205773d", size = 77403, upload-time = "2025-12-23T11:54:12.849Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2c/e6/a42b600ae8b808371f740381f6c32050cad93f870d36cc697b8b7006bf7c/elastic_transport-9.2.1-py3-none-any.whl", hash = "sha256:39e1a25e486af34ce7aa1bc9005d1c736f1b6fb04c9b64ea0604ded5a61fc1d4", size = 65327, upload-time = "2025-12-23T11:54:11.681Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "elasticsearch"
|
||||||
|
version = "9.2.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "anyio" },
|
||||||
|
{ name = "elastic-transport" },
|
||||||
|
{ name = "python-dateutil" },
|
||||||
|
{ name = "sniffio" },
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/bc/6c/67bb17ca0035b0cac4cfbbe64e18d120203fef22da66dd4c636563a0ea63/elasticsearch-9.2.1.tar.gz", hash = "sha256:97f473418e8976611349757287ac982acf12f4e305182863d985d5a031c36830", size = 878062, upload-time = "2025-12-23T14:37:31.694Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c0/d5/84264c29ec67f2f8129676ce11f05defb52f44e97e5f411db9a220f2aa43/elasticsearch-9.2.1-py3-none-any.whl", hash = "sha256:8665f5a0b4d29a7c2772851c05ea8a09279abb7928b7d727524613bd61d75958", size = 963593, upload-time = "2025-12-23T14:37:28.047Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
name = "es-ob-migration"
version = "0.1.0"
source = { editable = "." }
dependencies = [
    { name = "click" },
    { name = "elasticsearch" },
    { name = "pymysql" },
    { name = "pyobvector" },
    { name = "rich" },
    { name = "sqlalchemy" },
    { name = "tqdm" },
]

[package.optional-dependencies]
dev = [
    { name = "pytest" },
    { name = "pytest-asyncio" },
    { name = "pytest-cov" },
]

[package.dev-dependencies]
dev = [
    { name = "pytest" },
    { name = "pytest-asyncio" },
    { name = "pytest-cov" },
]

[package.metadata]
requires-dist = [
    { name = "click", specifier = ">=8.0.0" },
    { name = "elasticsearch", specifier = ">=8.0.0" },
    { name = "pymysql", specifier = ">=1.0.0" },
    { name = "pyobvector", specifier = ">=0.1.0" },
    { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" },
    { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" },
    { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.0.0" },
    { name = "rich", specifier = ">=13.0.0" },
    { name = "sqlalchemy", specifier = ">=2.0.0" },
    { name = "tqdm", specifier = ">=4.60.0" },
]
provides-extras = ["dev"]

[package.metadata.requires-dev]
dev = [
    { name = "pytest", specifier = ">=7.0.0" },
    { name = "pytest-asyncio", specifier = ">=0.21.0" },
    { name = "pytest-cov", specifier = ">=4.0.0" },
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "exceptiongroup"
|
||||||
|
version = "1.3.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "greenlet"
|
||||||
|
version = "3.3.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/8a/99/1cd3411c56a410994669062bd73dd58270c00cc074cac15f385a1fd91f8a/greenlet-3.3.1.tar.gz", hash = "sha256:41848f3230b58c08bb43dee542e74a2a2e34d3c59dc3076cec9151aeeedcae98", size = 184690, upload-time = "2026-01-23T15:31:02.076Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fe/65/5b235b40581ad75ab97dcd8b4218022ae8e3ab77c13c919f1a1dfe9171fd/greenlet-3.3.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:04bee4775f40ecefcdaa9d115ab44736cd4b9c5fba733575bfe9379419582e13", size = 273723, upload-time = "2026-01-23T15:30:37.521Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ce/ad/eb4729b85cba2d29499e0a04ca6fbdd8f540afd7be142fd571eea43d712f/greenlet-3.3.1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:50e1457f4fed12a50e427988a07f0f9df53cf0ee8da23fab16e6732c2ec909d4", size = 574874, upload-time = "2026-01-23T16:00:54.551Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/87/32/57cad7fe4c8b82fdaa098c89498ef85ad92dfbb09d5eb713adedfc2ae1f5/greenlet-3.3.1-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:070472cd156f0656f86f92e954591644e158fd65aa415ffbe2d44ca77656a8f5", size = 586309, upload-time = "2026-01-23T16:05:25.18Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/66/66/f041005cb87055e62b0d68680e88ec1a57f4688523d5e2fb305841bc8307/greenlet-3.3.1-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1108b61b06b5224656121c3c8ee8876161c491cbe74e5c519e0634c837cf93d5", size = 597461, upload-time = "2026-01-23T16:15:51.943Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/87/eb/8a1ec2da4d55824f160594a75a9d8354a5fe0a300fb1c48e7944265217e1/greenlet-3.3.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a300354f27dd86bae5fbf7002e6dd2b3255cd372e9242c933faf5e859b703fe", size = 586985, upload-time = "2026-01-23T15:32:47.968Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/15/1c/0621dd4321dd8c351372ee8f9308136acb628600658a49be1b7504208738/greenlet-3.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e84b51cbebf9ae573b5fbd15df88887815e3253fc000a7d0ff95170e8f7e9729", size = 1547271, upload-time = "2026-01-23T16:04:18.977Z" },
{ url = "https://files.pythonhosted.org/packages/9d/53/24047f8924c83bea7a59c8678d9571209c6bfe5f4c17c94a78c06024e9f2/greenlet-3.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0093bd1a06d899892427217f0ff2a3c8f306182b8c754336d32e2d587c131b4", size = 1613427, upload-time = "2026-01-23T15:33:44.428Z" },
{ url = "https://files.pythonhosted.org/packages/ff/07/ac9bf1ec008916d1a3373cae212884c1dcff4a4ba0d41127ce81a8deb4e9/greenlet-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:7932f5f57609b6a3b82cc11877709aa7a98e3308983ed93552a1c377069b20c8", size = 226100, upload-time = "2026-01-23T15:30:56.957Z" },
{ url = "https://files.pythonhosted.org/packages/ec/e8/2e1462c8fdbe0f210feb5ac7ad2d9029af8be3bf45bd9fa39765f821642f/greenlet-3.3.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:5fd23b9bc6d37b563211c6abbb1b3cab27db385a4449af5c32e932f93017080c", size = 274974, upload-time = "2026-01-23T15:31:02.891Z" },
{ url = "https://files.pythonhosted.org/packages/7e/a8/530a401419a6b302af59f67aaf0b9ba1015855ea7e56c036b5928793c5bd/greenlet-3.3.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f51496a0bfbaa9d74d36a52d2580d1ef5ed4fdfcff0a73730abfbbbe1403dd", size = 577175, upload-time = "2026-01-23T16:00:56.213Z" },
{ url = "https://files.pythonhosted.org/packages/8e/89/7e812bb9c05e1aaef9b597ac1d0962b9021d2c6269354966451e885c4e6b/greenlet-3.3.1-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb0feb07fe6e6a74615ee62a880007d976cf739b6669cce95daa7373d4fc69c5", size = 590401, upload-time = "2026-01-23T16:05:26.365Z" },
{ url = "https://files.pythonhosted.org/packages/70/ae/e2d5f0e59b94a2269b68a629173263fa40b63da32f5c231307c349315871/greenlet-3.3.1-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:67ea3fc73c8cd92f42467a72b75e8f05ed51a0e9b1d15398c913416f2dafd49f", size = 601161, upload-time = "2026-01-23T16:15:53.456Z" },
{ url = "https://files.pythonhosted.org/packages/5c/ae/8d472e1f5ac5efe55c563f3eabb38c98a44b832602e12910750a7c025802/greenlet-3.3.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39eda9ba259cc9801da05351eaa8576e9aa83eb9411e8f0c299e05d712a210f2", size = 590272, upload-time = "2026-01-23T15:32:49.411Z" },
{ url = "https://files.pythonhosted.org/packages/a8/51/0fde34bebfcadc833550717eade64e35ec8738e6b097d5d248274a01258b/greenlet-3.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e2e7e882f83149f0a71ac822ebf156d902e7a5d22c9045e3e0d1daf59cee2cc9", size = 1550729, upload-time = "2026-01-23T16:04:20.867Z" },
{ url = "https://files.pythonhosted.org/packages/16/c9/2fb47bee83b25b119d5a35d580807bb8b92480a54b68fef009a02945629f/greenlet-3.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80aa4d79eb5564f2e0a6144fcc744b5a37c56c4a92d60920720e99210d88db0f", size = 1615552, upload-time = "2026-01-23T15:33:45.743Z" },
{ url = "https://files.pythonhosted.org/packages/1f/54/dcf9f737b96606f82f8dd05becfb8d238db0633dd7397d542a296fe9cad3/greenlet-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:32e4ca9777c5addcbf42ff3915d99030d8e00173a56f80001fb3875998fe410b", size = 226462, upload-time = "2026-01-23T15:36:50.422Z" },
{ url = "https://files.pythonhosted.org/packages/91/37/61e1015cf944ddd2337447d8e97fb423ac9bc21f9963fb5f206b53d65649/greenlet-3.3.1-cp311-cp311-win_arm64.whl", hash = "sha256:da19609432f353fed186cc1b85e9440db93d489f198b4bdf42ae19cc9d9ac9b4", size = 225715, upload-time = "2026-01-23T15:33:17.298Z" },
{ url = "https://files.pythonhosted.org/packages/f9/c8/9d76a66421d1ae24340dfae7e79c313957f6e3195c144d2c73333b5bfe34/greenlet-3.3.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7e806ca53acf6d15a888405880766ec84721aa4181261cd11a457dfe9a7a4975", size = 276443, upload-time = "2026-01-23T15:30:10.066Z" },
{ url = "https://files.pythonhosted.org/packages/81/99/401ff34bb3c032d1f10477d199724f5e5f6fbfb59816ad1455c79c1eb8e7/greenlet-3.3.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d842c94b9155f1c9b3058036c24ffb8ff78b428414a19792b2380be9cecf4f36", size = 597359, upload-time = "2026-01-23T16:00:57.394Z" },
{ url = "https://files.pythonhosted.org/packages/2b/bc/4dcc0871ed557792d304f50be0f7487a14e017952ec689effe2180a6ff35/greenlet-3.3.1-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20fedaadd422fa02695f82093f9a98bad3dab5fcda793c658b945fcde2ab27ba", size = 607805, upload-time = "2026-01-23T16:05:28.068Z" },
{ url = "https://files.pythonhosted.org/packages/3b/cd/7a7ca57588dac3389e97f7c9521cb6641fd8b6602faf1eaa4188384757df/greenlet-3.3.1-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c620051669fd04ac6b60ebc70478210119c56e2d5d5df848baec4312e260e4ca", size = 622363, upload-time = "2026-01-23T16:15:54.754Z" },
{ url = "https://files.pythonhosted.org/packages/cf/05/821587cf19e2ce1f2b24945d890b164401e5085f9d09cbd969b0c193cd20/greenlet-3.3.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14194f5f4305800ff329cbf02c5fcc88f01886cadd29941b807668a45f0d2336", size = 609947, upload-time = "2026-01-23T15:32:51.004Z" },
{ url = "https://files.pythonhosted.org/packages/a4/52/ee8c46ed9f8babaa93a19e577f26e3d28a519feac6350ed6f25f1afee7e9/greenlet-3.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7b2fe4150a0cf59f847a67db8c155ac36aed89080a6a639e9f16df5d6c6096f1", size = 1567487, upload-time = "2026-01-23T16:04:22.125Z" },
{ url = "https://files.pythonhosted.org/packages/8f/7c/456a74f07029597626f3a6db71b273a3632aecb9afafeeca452cfa633197/greenlet-3.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49f4ad195d45f4a66a0eb9c1ba4832bb380570d361912fa3554746830d332149", size = 1636087, upload-time = "2026-01-23T15:33:47.486Z" },
{ url = "https://files.pythonhosted.org/packages/34/2f/5e0e41f33c69655300a5e54aeb637cf8ff57f1786a3aba374eacc0228c1d/greenlet-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:cc98b9c4e4870fa983436afa999d4eb16b12872fab7071423d5262fa7120d57a", size = 227156, upload-time = "2026-01-23T15:34:34.808Z" },
{ url = "https://files.pythonhosted.org/packages/c8/ab/717c58343cf02c5265b531384b248787e04d8160b8afe53d9eec053d7b44/greenlet-3.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:bfb2d1763d777de5ee495c85309460f6fd8146e50ec9d0ae0183dbf6f0a829d1", size = 226403, upload-time = "2026-01-23T15:31:39.372Z" },
{ url = "https://files.pythonhosted.org/packages/ec/ab/d26750f2b7242c2b90ea2ad71de70cfcd73a948a49513188a0fc0d6fc15a/greenlet-3.3.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:7ab327905cabb0622adca5971e488064e35115430cec2c35a50fd36e72a315b3", size = 275205, upload-time = "2026-01-23T15:30:24.556Z" },
{ url = "https://files.pythonhosted.org/packages/10/d3/be7d19e8fad7c5a78eeefb2d896a08cd4643e1e90c605c4be3b46264998f/greenlet-3.3.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:65be2f026ca6a176f88fb935ee23c18333ccea97048076aef4db1ef5bc0713ac", size = 599284, upload-time = "2026-01-23T16:00:58.584Z" },
{ url = "https://files.pythonhosted.org/packages/ae/21/fe703aaa056fdb0f17e5afd4b5c80195bbdab701208918938bd15b00d39b/greenlet-3.3.1-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7a3ae05b3d225b4155bda56b072ceb09d05e974bc74be6c3fc15463cf69f33fd", size = 610274, upload-time = "2026-01-23T16:05:29.312Z" },
{ url = "https://files.pythonhosted.org/packages/06/00/95df0b6a935103c0452dad2203f5be8377e551b8466a29650c4c5a5af6cc/greenlet-3.3.1-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:12184c61e5d64268a160226fb4818af4df02cfead8379d7f8b99a56c3a54ff3e", size = 624375, upload-time = "2026-01-23T16:15:55.915Z" },
{ url = "https://files.pythonhosted.org/packages/cb/86/5c6ab23bb3c28c21ed6bebad006515cfe08b04613eb105ca0041fecca852/greenlet-3.3.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6423481193bbbe871313de5fd06a082f2649e7ce6e08015d2a76c1e9186ca5b3", size = 612904, upload-time = "2026-01-23T15:32:52.317Z" },
{ url = "https://files.pythonhosted.org/packages/c2/f3/7949994264e22639e40718c2daf6f6df5169bf48fb038c008a489ec53a50/greenlet-3.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:33a956fe78bbbda82bfc95e128d61129b32d66bcf0a20a1f0c08aa4839ffa951", size = 1567316, upload-time = "2026-01-23T16:04:23.316Z" },
{ url = "https://files.pythonhosted.org/packages/8d/6e/d73c94d13b6465e9f7cd6231c68abde838bb22408596c05d9059830b7872/greenlet-3.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b065d3284be43728dd280f6f9a13990b56470b81be20375a207cdc814a983f2", size = 1636549, upload-time = "2026-01-23T15:33:48.643Z" },
{ url = "https://files.pythonhosted.org/packages/5e/b3/c9c23a6478b3bcc91f979ce4ca50879e4d0b2bd7b9a53d8ecded719b92e2/greenlet-3.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:27289986f4e5b0edec7b5a91063c109f0276abb09a7e9bdab08437525977c946", size = 227042, upload-time = "2026-01-23T15:33:58.216Z" },
{ url = "https://files.pythonhosted.org/packages/90/e7/824beda656097edee36ab15809fd063447b200cc03a7f6a24c34d520bc88/greenlet-3.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:2f080e028001c5273e0b42690eaf359aeef9cb1389da0f171ea51a5dc3c7608d", size = 226294, upload-time = "2026-01-23T15:30:52.73Z" },
{ url = "https://files.pythonhosted.org/packages/ae/fb/011c7c717213182caf78084a9bea51c8590b0afda98001f69d9f853a495b/greenlet-3.3.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:bd59acd8529b372775cd0fcbc5f420ae20681c5b045ce25bd453ed8455ab99b5", size = 275737, upload-time = "2026-01-23T15:32:16.889Z" },
{ url = "https://files.pythonhosted.org/packages/41/2e/a3a417d620363fdbb08a48b1dd582956a46a61bf8fd27ee8164f9dfe87c2/greenlet-3.3.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b31c05dd84ef6871dd47120386aed35323c944d86c3d91a17c4b8d23df62f15b", size = 646422, upload-time = "2026-01-23T16:01:00.354Z" },
{ url = "https://files.pythonhosted.org/packages/b4/09/c6c4a0db47defafd2d6bab8ddfe47ad19963b4e30f5bed84d75328059f8c/greenlet-3.3.1-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:02925a0bfffc41e542c70aa14c7eda3593e4d7e274bfcccca1827e6c0875902e", size = 658219, upload-time = "2026-01-23T16:05:30.956Z" },
{ url = "https://files.pythonhosted.org/packages/e2/89/b95f2ddcc5f3c2bc09c8ee8d77be312df7f9e7175703ab780f2014a0e781/greenlet-3.3.1-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3e0f3878ca3a3ff63ab4ea478585942b53df66ddde327b59ecb191b19dbbd62d", size = 671455, upload-time = "2026-01-23T16:15:57.232Z" },
{ url = "https://files.pythonhosted.org/packages/80/38/9d42d60dffb04b45f03dbab9430898352dba277758640751dc5cc316c521/greenlet-3.3.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34a729e2e4e4ffe9ae2408d5ecaf12f944853f40ad724929b7585bca808a9d6f", size = 660237, upload-time = "2026-01-23T15:32:53.967Z" },
{ url = "https://files.pythonhosted.org/packages/96/61/373c30b7197f9e756e4c81ae90a8d55dc3598c17673f91f4d31c3c689c3f/greenlet-3.3.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aec9ab04e82918e623415947921dea15851b152b822661cce3f8e4393c3df683", size = 1615261, upload-time = "2026-01-23T16:04:25.066Z" },
{ url = "https://files.pythonhosted.org/packages/fd/d3/ca534310343f5945316f9451e953dcd89b36fe7a19de652a1dc5a0eeef3f/greenlet-3.3.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:71c767cf281a80d02b6c1bdc41c9468e1f5a494fb11bc8688c360524e273d7b1", size = 1683719, upload-time = "2026-01-23T15:33:50.61Z" },
{ url = "https://files.pythonhosted.org/packages/52/cb/c21a3fd5d2c9c8b622e7bede6d6d00e00551a5ee474ea6d831b5f567a8b4/greenlet-3.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:96aff77af063b607f2489473484e39a0bbae730f2ea90c9e5606c9b73c44174a", size = 228125, upload-time = "2026-01-23T15:32:45.265Z" },
{ url = "https://files.pythonhosted.org/packages/6a/8e/8a2db6d11491837af1de64b8aff23707c6e85241be13c60ed399a72e2ef8/greenlet-3.3.1-cp314-cp314-win_arm64.whl", hash = "sha256:b066e8b50e28b503f604fa538adc764a638b38cf8e81e025011d26e8a627fa79", size = 227519, upload-time = "2026-01-23T15:31:47.284Z" },
{ url = "https://files.pythonhosted.org/packages/28/24/cbbec49bacdcc9ec652a81d3efef7b59f326697e7edf6ed775a5e08e54c2/greenlet-3.3.1-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:3e63252943c921b90abb035ebe9de832c436401d9c45f262d80e2d06cc659242", size = 282706, upload-time = "2026-01-23T15:33:05.525Z" },
{ url = "https://files.pythonhosted.org/packages/86/2e/4f2b9323c144c4fe8842a4e0d92121465485c3c2c5b9e9b30a52e80f523f/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76e39058e68eb125de10c92524573924e827927df5d3891fbc97bd55764a8774", size = 651209, upload-time = "2026-01-23T16:01:01.517Z" },
{ url = "https://files.pythonhosted.org/packages/d9/87/50ca60e515f5bb55a2fbc5f0c9b5b156de7d2fc51a0a69abc9d23914a237/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9f9d5e7a9310b7a2f416dd13d2e3fd8b42d803968ea580b7c0f322ccb389b97", size = 654300, upload-time = "2026-01-23T16:05:32.199Z" },
{ url = "https://files.pythonhosted.org/packages/7c/25/c51a63f3f463171e09cb586eb64db0861eb06667ab01a7968371a24c4f3b/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b9721549a95db96689458a1e0ae32412ca18776ed004463df3a9299c1b257ab", size = 662574, upload-time = "2026-01-23T16:15:58.364Z" },
{ url = "https://files.pythonhosted.org/packages/1d/94/74310866dfa2b73dd08659a3d18762f83985ad3281901ba0ee9a815194fb/greenlet-3.3.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92497c78adf3ac703b57f1e3813c2d874f27f71a178f9ea5887855da413cd6d2", size = 653842, upload-time = "2026-01-23T15:32:55.671Z" },
{ url = "https://files.pythonhosted.org/packages/97/43/8bf0ffa3d498eeee4c58c212a3905dd6146c01c8dc0b0a046481ca29b18c/greenlet-3.3.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ed6b402bc74d6557a705e197d47f9063733091ed6357b3de33619d8a8d93ac53", size = 1614917, upload-time = "2026-01-23T16:04:26.276Z" },
{ url = "https://files.pythonhosted.org/packages/89/90/a3be7a5f378fc6e84abe4dcfb2ba32b07786861172e502388b4c90000d1b/greenlet-3.3.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:59913f1e5ada20fde795ba906916aea25d442abcc0593fba7e26c92b7ad76249", size = 1676092, upload-time = "2026-01-23T15:33:52.176Z" },
{ url = "https://files.pythonhosted.org/packages/e1/2b/98c7f93e6db9977aaee07eb1e51ca63bd5f779b900d362791d3252e60558/greenlet-3.3.1-cp314-cp314t-win_amd64.whl", hash = "sha256:301860987846c24cb8964bdec0e31a96ad4a2a801b41b4ef40963c1b44f33451", size = 233181, upload-time = "2026-01-23T15:33:00.29Z" },
]

[[package]]
name = "idna"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]

[[package]]
name = "iniconfig"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
]

[[package]]
name = "markdown-it-py"
version = "4.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mdurl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
]

[[package]]
name = "mdurl"
version = "0.1.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]

[[package]]
name = "numpy"
version = "2.2.6"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version < '3.11'",
]
sdist = { url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd", size = 20276440, upload-time = "2025-05-17T22:38:04.611Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb", size = 21165245, upload-time = "2025-05-17T21:27:58.555Z" },
{ url = "https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90", size = 14360048, upload-time = "2025-05-17T21:28:21.406Z" },
{ url = "https://files.pythonhosted.org/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163", size = 5340542, upload-time = "2025-05-17T21:28:30.931Z" },
{ url = "https://files.pythonhosted.org/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf", size = 6878301, upload-time = "2025-05-17T21:28:41.613Z" },
{ url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83", size = 14297320, upload-time = "2025-05-17T21:29:02.78Z" },
{ url = "https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915", size = 16801050, upload-time = "2025-05-17T21:29:27.675Z" },
{ url = "https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680", size = 15807034, upload-time = "2025-05-17T21:29:51.102Z" },
{ url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289", size = 18614185, upload-time = "2025-05-17T21:30:18.703Z" },
{ url = "https://files.pythonhosted.org/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d", size = 6527149, upload-time = "2025-05-17T21:30:29.788Z" },
{ url = "https://files.pythonhosted.org/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3", size = 12904620, upload-time = "2025-05-17T21:30:48.994Z" },
{ url = "https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae", size = 21176963, upload-time = "2025-05-17T21:31:19.36Z" },
{ url = "https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a", size = 14406743, upload-time = "2025-05-17T21:31:41.087Z" },
{ url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42", size = 5352616, upload-time = "2025-05-17T21:31:50.072Z" },
{ url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491", size = 6889579, upload-time = "2025-05-17T21:32:01.712Z" },
{ url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a", size = 14312005, upload-time = "2025-05-17T21:32:23.332Z" },
{ url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf", size = 16821570, upload-time = "2025-05-17T21:32:47.991Z" },
{ url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1", size = 15818548, upload-time = "2025-05-17T21:33:11.728Z" },
{ url = "https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab", size = 18620521, upload-time = "2025-05-17T21:33:39.139Z" },
{ url = "https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47", size = 6525866, upload-time = "2025-05-17T21:33:50.273Z" },
{ url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303", size = 12907455, upload-time = "2025-05-17T21:34:09.135Z" },
{ url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff", size = 20875348, upload-time = "2025-05-17T21:34:39.648Z" },
{ url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c", size = 14119362, upload-time = "2025-05-17T21:35:01.241Z" },
{ url = "https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3", size = 5084103, upload-time = "2025-05-17T21:35:10.622Z" },
{ url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282", size = 6625382, upload-time = "2025-05-17T21:35:21.414Z" },
{ url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87", size = 14018462, upload-time = "2025-05-17T21:35:42.174Z" },
{ url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249", size = 16527618, upload-time = "2025-05-17T21:36:06.711Z" },
{ url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49", size = 15505511, upload-time = "2025-05-17T21:36:29.965Z" },
{ url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de", size = 18313783, upload-time = "2025-05-17T21:36:56.883Z" },
{ url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4", size = 6246506, upload-time = "2025-05-17T21:37:07.368Z" },
{ url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2", size = 12614190, upload-time = "2025-05-17T21:37:26.213Z" },
{ url = "https://files.pythonhosted.org/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84", size = 20867828, upload-time = "2025-05-17T21:37:56.699Z" },
{ url = "https://files.pythonhosted.org/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b", size = 14143006, upload-time = "2025-05-17T21:38:18.291Z" },
{ url = "https://files.pythonhosted.org/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d", size = 5076765, upload-time = "2025-05-17T21:38:27.319Z" },
{ url = "https://files.pythonhosted.org/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566", size = 6617736, upload-time = "2025-05-17T21:38:38.141Z" },
{ url = "https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f", size = 14010719, upload-time = "2025-05-17T21:38:58.433Z" },
{ url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f", size = 16526072, upload-time = "2025-05-17T21:39:22.638Z" },
{ url = "https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868", size = 15503213, upload-time = "2025-05-17T21:39:45.865Z" },
{ url = "https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d", size = 18316632, upload-time = "2025-05-17T21:40:13.331Z" },
{ url = "https://files.pythonhosted.org/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd", size = 6244532, upload-time = "2025-05-17T21:43:46.099Z" },
{ url = "https://files.pythonhosted.org/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c", size = 12610885, upload-time = "2025-05-17T21:44:05.145Z" },
{ url = "https://files.pythonhosted.org/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6", size = 20963467, upload-time = "2025-05-17T21:40:44Z" },
{ url = "https://files.pythonhosted.org/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda", size = 14225144, upload-time = "2025-05-17T21:41:05.695Z" },
{ url = "https://files.pythonhosted.org/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40", size = 5200217, upload-time = "2025-05-17T21:41:15.903Z" },
{ url = "https://files.pythonhosted.org/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8", size = 6712014, upload-time = "2025-05-17T21:41:27.321Z" },
{ url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f", size = 14077935, upload-time = "2025-05-17T21:41:49.738Z" },
{ url = "https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa", size = 16600122, upload-time = "2025-05-17T21:42:14.046Z" },
{ url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571", size = 15586143, upload-time = "2025-05-17T21:42:37.464Z" },
{ url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1", size = 18385260, upload-time = "2025-05-17T21:43:05.189Z" },
{ url = "https://files.pythonhosted.org/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff", size = 6377225, upload-time = "2025-05-17T21:43:16.254Z" },
{ url = "https://files.pythonhosted.org/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06", size = 12771374, upload-time = "2025-05-17T21:43:35.479Z" },
{ url = "https://files.pythonhosted.org/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d", size = 21040391, upload-time = "2025-05-17T21:44:35.948Z" },
{ url = "https://files.pythonhosted.org/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db", size = 6786754, upload-time = "2025-05-17T21:44:47.446Z" },
{ url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543", size = 16643476, upload-time = "2025-05-17T21:45:11.871Z" },
{ url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00", size = 12812666, upload-time = "2025-05-17T21:45:31.426Z" },
]

[[package]]
name = "numpy"
version = "2.4.1"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version >= '3.11'",
]
sdist = { url = "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320, upload-time = "2026-01-10T06:44:59.619Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a5/34/2b1bc18424f3ad9af577f6ce23600319968a70575bd7db31ce66731bbef9/numpy-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0cce2a669e3c8ba02ee563c7835f92c153cf02edff1ae05e1823f1dde21b16a5", size = 16944563, upload-time = "2026-01-10T06:42:14.615Z" },
{ url = "https://files.pythonhosted.org/packages/2c/57/26e5f97d075aef3794045a6ca9eada6a4ed70eb9a40e7a4a93f9ac80d704/numpy-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:899d2c18024984814ac7e83f8f49d8e8180e2fbe1b2e252f2e7f1d06bea92425", size = 12645658, upload-time = "2026-01-10T06:42:17.298Z" },
{ url = "https://files.pythonhosted.org/packages/8e/ba/80fc0b1e3cb2fd5c6143f00f42eb67762aa043eaa05ca924ecc3222a7849/numpy-2.4.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:09aa8a87e45b55a1c2c205d42e2808849ece5c484b2aab11fecabec3841cafba", size = 5474132, upload-time = "2026-01-10T06:42:19.637Z" },
{ url = "https://files.pythonhosted.org/packages/40/ae/0a5b9a397f0e865ec171187c78d9b57e5588afc439a04ba9cab1ebb2c945/numpy-2.4.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:edee228f76ee2dab4579fad6f51f6a305de09d444280109e0f75df247ff21501", size = 6804159, upload-time = "2026-01-10T06:42:21.44Z" },
{ url = "https://files.pythonhosted.org/packages/86/9c/841c15e691c7085caa6fd162f063eff494099c8327aeccd509d1ab1e36ab/numpy-2.4.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a92f227dbcdc9e4c3e193add1a189a9909947d4f8504c576f4a732fd0b54240a", size = 14708058, upload-time = "2026-01-10T06:42:23.546Z" },
{ url = "https://files.pythonhosted.org/packages/5d/9d/7862db06743f489e6a502a3b93136d73aea27d97b2cf91504f70a27501d6/numpy-2.4.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:538bf4ec353709c765ff75ae616c34d3c3dca1a68312727e8f2676ea644f8509", size = 16651501, upload-time = "2026-01-10T06:42:25.909Z" },
{ url = "https://files.pythonhosted.org/packages/a6/9c/6fc34ebcbd4015c6e5f0c0ce38264010ce8a546cb6beacb457b84a75dfc8/numpy-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ac08c63cb7779b85e9d5318e6c3518b424bc1f364ac4cb2c6136f12e5ff2dccc", size = 16492627, upload-time = "2026-01-10T06:42:28.938Z" },
{ url = "https://files.pythonhosted.org/packages/aa/63/2494a8597502dacda439f61b3c0db4da59928150e62be0e99395c3ad23c5/numpy-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f9c360ecef085e5841c539a9a12b883dff005fbd7ce46722f5e9cef52634d82", size = 18585052, upload-time = "2026-01-10T06:42:31.312Z" },
{ url = "https://files.pythonhosted.org/packages/6a/93/098e1162ae7522fc9b618d6272b77404c4656c72432ecee3abc029aa3de0/numpy-2.4.1-cp311-cp311-win32.whl", hash = "sha256:0f118ce6b972080ba0758c6087c3617b5ba243d806268623dc34216d69099ba0", size = 6236575, upload-time = "2026-01-10T06:42:33.872Z" },
{ url = "https://files.pythonhosted.org/packages/8c/de/f5e79650d23d9e12f38a7bc6b03ea0835b9575494f8ec94c11c6e773b1b1/numpy-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:18e14c4d09d55eef39a6ab5b08406e84bc6869c1e34eef45564804f90b7e0574", size = 12604479, upload-time = "2026-01-10T06:42:35.778Z" },
{ url = "https://files.pythonhosted.org/packages/dd/65/e1097a7047cff12ce3369bd003811516b20ba1078dbdec135e1cd7c16c56/numpy-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:6461de5113088b399d655d45c3897fa188766415d0f568f175ab071c8873bd73", size = 10578325, upload-time = "2026-01-10T06:42:38.518Z" },
{ url = "https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888, upload-time = "2026-01-10T06:42:40.913Z" },
{ url = "https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956, upload-time = "2026-01-10T06:42:43.091Z" },
{ url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567, upload-time = "2026-01-10T06:42:45.107Z" },
{ url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459, upload-time = "2026-01-10T06:42:48.152Z" },
{ url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859, upload-time = "2026-01-10T06:42:49.947Z" },
{ url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419, upload-time = "2026-01-10T06:42:52.409Z" },
{ url = "https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131, upload-time = "2026-01-10T06:42:54.694Z" },
{ url = "https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342, upload-time = "2026-01-10T06:42:56.991Z" },
{ url = "https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015, upload-time = "2026-01-10T06:42:59.631Z" },
{ url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730, upload-time = "2026-01-10T06:43:01.627Z" },
{ url = "https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166, upload-time = "2026-01-10T06:43:03.673Z" },
{ url = "https://files.pythonhosted.org/packages/04/68/732d4b7811c00775f3bd522a21e8dd5a23f77eb11acdeb663e4a4ebf0ef4/numpy-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d797454e37570cfd61143b73b8debd623c3c0952959adb817dd310a483d58a1b", size = 16652495, upload-time = "2026-01-10T06:43:06.283Z" },
{ url = "https://files.pythonhosted.org/packages/20/ca/857722353421a27f1465652b2c66813eeeccea9d76d5f7b74b99f298e60e/numpy-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c55962006156aeef1629b953fd359064aa47e4d82cfc8e67f0918f7da3344f", size = 12368657, upload-time = "2026-01-10T06:43:09.094Z" },
{ url = "https://files.pythonhosted.org/packages/81/0d/2377c917513449cc6240031a79d30eb9a163d32a91e79e0da47c43f2c0c8/numpy-2.4.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:71abbea030f2cfc3092a0ff9f8c8fdefdc5e0bf7d9d9c99663538bb0ecdac0b9", size = 5197256, upload-time = "2026-01-10T06:43:13.634Z" },
{ url = "https://files.pythonhosted.org/packages/17/39/569452228de3f5de9064ac75137082c6214be1f5c532016549a7923ab4b5/numpy-2.4.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b55aa56165b17aaf15520beb9cbd33c9039810e0d9643dd4379e44294c7303e", size = 6545212, upload-time = "2026-01-10T06:43:15.661Z" },
{ url = "https://files.pythonhosted.org/packages/8c/a4/77333f4d1e4dac4395385482557aeecf4826e6ff517e32ca48e1dafbe42a/numpy-2.4.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0faba4a331195bfa96f93dd9dfaa10b2c7aa8cda3a02b7fd635e588fe821bf5", size = 14402871, upload-time = "2026-01-10T06:43:17.324Z" },
{ url = "https://files.pythonhosted.org/packages/ba/87/d341e519956273b39d8d47969dd1eaa1af740615394fe67d06f1efa68773/numpy-2.4.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e3087f53e2b4428766b54932644d148613c5a595150533ae7f00dab2f319a8", size = 16359305, upload-time = "2026-01-10T06:43:19.376Z" },
{ url = "https://files.pythonhosted.org/packages/32/91/789132c6666288eaa20ae8066bb99eba1939362e8f1a534949a215246e97/numpy-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:49e792ec351315e16da54b543db06ca8a86985ab682602d90c60ef4ff4db2a9c", size = 16181909, upload-time = "2026-01-10T06:43:21.808Z" },
{ url = "https://files.pythonhosted.org/packages/cf/b8/090b8bd27b82a844bb22ff8fdf7935cb1980b48d6e439ae116f53cdc2143/numpy-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79e9e06c4c2379db47f3f6fc7a8652e7498251789bf8ff5bd43bf478ef314ca2", size = 18284380, upload-time = "2026-01-10T06:43:23.957Z" },
{ url = "https://files.pythonhosted.org/packages/67/78/722b62bd31842ff029412271556a1a27a98f45359dea78b1548a3a9996aa/numpy-2.4.1-cp313-cp313-win32.whl", hash = "sha256:3d1a100e48cb266090a031397863ff8a30050ceefd798f686ff92c67a486753d", size = 5957089, upload-time = "2026-01-10T06:43:27.535Z" },
{ url = "https://files.pythonhosted.org/packages/da/a6/cf32198b0b6e18d4fbfa9a21a992a7fca535b9bb2b0cdd217d4a3445b5ca/numpy-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:92a0e65272fd60bfa0d9278e0484c2f52fe03b97aedc02b357f33fe752c52ffb", size = 12307230, upload-time = "2026-01-10T06:43:29.298Z" },
{ url = "https://files.pythonhosted.org/packages/44/6c/534d692bfb7d0afe30611320c5fb713659dcb5104d7cc182aff2aea092f5/numpy-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:20d4649c773f66cc2fc36f663e091f57c3b7655f936a4c681b4250855d1da8f5", size = 10313125, upload-time = "2026-01-10T06:43:31.782Z" },
{ url = "https://files.pythonhosted.org/packages/da/a1/354583ac5c4caa566de6ddfbc42744409b515039e085fab6e0ff942e0df5/numpy-2.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f93bc6892fe7b0663e5ffa83b61aab510aacffd58c16e012bb9352d489d90cb7", size = 12496156, upload-time = "2026-01-10T06:43:34.237Z" },
{ url = "https://files.pythonhosted.org/packages/51/b0/42807c6e8cce58c00127b1dc24d365305189991f2a7917aa694a109c8d7d/numpy-2.4.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:178de8f87948163d98a4c9ab5bee4ce6519ca918926ec8df195af582de28544d", size = 5324663, upload-time = "2026-01-10T06:43:36.211Z" },
{ url = "https://files.pythonhosted.org/packages/fe/55/7a621694010d92375ed82f312b2f28017694ed784775269115323e37f5e2/numpy-2.4.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:98b35775e03ab7f868908b524fc0a84d38932d8daf7b7e1c3c3a1b6c7a2c9f15", size = 6645224, upload-time = "2026-01-10T06:43:37.884Z" },
{ url = "https://files.pythonhosted.org/packages/50/96/9fa8635ed9d7c847d87e30c834f7109fac5e88549d79ef3324ab5c20919f/numpy-2.4.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:941c2a93313d030f219f3a71fd3d91a728b82979a5e8034eb2e60d394a2b83f9", size = 14462352, upload-time = "2026-01-10T06:43:39.479Z" },
{ url = "https://files.pythonhosted.org/packages/03/d1/8cf62d8bb2062da4fb82dd5d49e47c923f9c0738032f054e0a75342faba7/numpy-2.4.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:529050522e983e00a6c1c6b67411083630de8b57f65e853d7b03d9281b8694d2", size = 16407279, upload-time = "2026-01-10T06:43:41.93Z" },
{ url = "https://files.pythonhosted.org/packages/86/1c/95c86e17c6b0b31ce6ef219da00f71113b220bcb14938c8d9a05cee0ff53/numpy-2.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2302dc0224c1cbc49bb94f7064f3f923a971bfae45c33870dcbff63a2a550505", size = 16248316, upload-time = "2026-01-10T06:43:44.121Z" },
{ url = "https://files.pythonhosted.org/packages/30/b4/e7f5ff8697274c9d0fa82398b6a372a27e5cef069b37df6355ccb1f1db1a/numpy-2.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9171a42fcad32dcf3fa86f0a4faa5e9f8facefdb276f54b8b390d90447cff4e2", size = 18329884, upload-time = "2026-01-10T06:43:46.613Z" },
{ url = "https://files.pythonhosted.org/packages/37/a4/b073f3e9d77f9aec8debe8ca7f9f6a09e888ad1ba7488f0c3b36a94c03ac/numpy-2.4.1-cp313-cp313t-win32.whl", hash = "sha256:382ad67d99ef49024f11d1ce5dcb5ad8432446e4246a4b014418ba3a1175a1f4", size = 6081138, upload-time = "2026-01-10T06:43:48.854Z" },
{ url = "https://files.pythonhosted.org/packages/16/16/af42337b53844e67752a092481ab869c0523bc95c4e5c98e4dac4e9581ac/numpy-2.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:62fea415f83ad8fdb6c20840578e5fbaf5ddd65e0ec6c3c47eda0f69da172510", size = 12447478, upload-time = "2026-01-10T06:43:50.476Z" },
{ url = "https://files.pythonhosted.org/packages/6c/f8/fa85b2eac68ec631d0b631abc448552cb17d39afd17ec53dcbcc3537681a/numpy-2.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a7870e8c5fc11aef57d6fea4b4085e537a3a60ad2cdd14322ed531fdca68d261", size = 10382981, upload-time = "2026-01-10T06:43:52.575Z" },
{ url = "https://files.pythonhosted.org/packages/1b/a7/ef08d25698e0e4b4efbad8d55251d20fe2a15f6d9aa7c9b30cd03c165e6f/numpy-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3869ea1ee1a1edc16c29bbe3a2f2a4e515cc3a44d43903ad41e0cacdbaf733dc", size = 16652046, upload-time = "2026-01-10T06:43:54.797Z" },
{ url = "https://files.pythonhosted.org/packages/8f/39/e378b3e3ca13477e5ac70293ec027c438d1927f18637e396fe90b1addd72/numpy-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e867df947d427cdd7a60e3e271729090b0f0df80f5f10ab7dd436f40811699c3", size = 12378858, upload-time = "2026-01-10T06:43:57.099Z" },
{ url = "https://files.pythonhosted.org/packages/c3/74/7ec6154f0006910ed1fdbb7591cf4432307033102b8a22041599935f8969/numpy-2.4.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:e3bd2cb07841166420d2fa7146c96ce00cb3410664cbc1a6be028e456c4ee220", size = 5207417, upload-time = "2026-01-10T06:43:59.037Z" },
{ url = "https://files.pythonhosted.org/packages/f7/b7/053ac11820d84e42f8feea5cb81cc4fcd1091499b45b1ed8c7415b1bf831/numpy-2.4.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:f0a90aba7d521e6954670550e561a4cb925713bd944445dbe9e729b71f6cabee", size = 6542643, upload-time = "2026-01-10T06:44:01.852Z" },
{ url = "https://files.pythonhosted.org/packages/c0/c4/2e7908915c0e32ca636b92e4e4a3bdec4cb1e7eb0f8aedf1ed3c68a0d8cd/numpy-2.4.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d558123217a83b2d1ba316b986e9248a1ed1971ad495963d555ccd75dcb1556", size = 14418963, upload-time = "2026-01-10T06:44:04.047Z" },
{ url = "https://files.pythonhosted.org/packages/eb/c0/3ed5083d94e7ffd7c404e54619c088e11f2e1939a9544f5397f4adb1b8ba/numpy-2.4.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f44de05659b67d20499cbc96d49f2650769afcb398b79b324bb6e297bfe3844", size = 16363811, upload-time = "2026-01-10T06:44:06.207Z" },
{ url = "https://files.pythonhosted.org/packages/0e/68/42b66f1852bf525050a67315a4fb94586ab7e9eaa541b1bef530fab0c5dd/numpy-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:69e7419c9012c4aaf695109564e3387f1259f001b4326dfa55907b098af082d3", size = 16197643, upload-time = "2026-01-10T06:44:08.33Z" },
{ url = "https://files.pythonhosted.org/packages/d2/40/e8714fc933d85f82c6bfc7b998a0649ad9769a32f3494ba86598aaf18a48/numpy-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2ffd257026eb1b34352e749d7cc1678b5eeec3e329ad8c9965a797e08ccba205", size = 18289601, upload-time = "2026-01-10T06:44:10.841Z" },
{ url = "https://files.pythonhosted.org/packages/80/9a/0d44b468cad50315127e884802351723daca7cf1c98d102929468c81d439/numpy-2.4.1-cp314-cp314-win32.whl", hash = "sha256:727c6c3275ddefa0dc078524a85e064c057b4f4e71ca5ca29a19163c607be745", size = 6005722, upload-time = "2026-01-10T06:44:13.332Z" },
{ url = "https://files.pythonhosted.org/packages/7e/bb/c6513edcce5a831810e2dddc0d3452ce84d208af92405a0c2e58fd8e7881/numpy-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:7d5d7999df434a038d75a748275cd6c0094b0ecdb0837342b332a82defc4dc4d", size = 12438590, upload-time = "2026-01-10T06:44:15.006Z" },
{ url = "https://files.pythonhosted.org/packages/e9/da/a598d5cb260780cf4d255102deba35c1d072dc028c4547832f45dd3323a8/numpy-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:ce9ce141a505053b3c7bce3216071f3bf5c182b8b28930f14cd24d43932cd2df", size = 10596180, upload-time = "2026-01-10T06:44:17.386Z" },
{ url = "https://files.pythonhosted.org/packages/de/bc/ea3f2c96fcb382311827231f911723aeff596364eb6e1b6d1d91128aa29b/numpy-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4e53170557d37ae404bf8d542ca5b7c629d6efa1117dac6a83e394142ea0a43f", size = 12498774, upload-time = "2026-01-10T06:44:19.467Z" },
{ url = "https://files.pythonhosted.org/packages/aa/ab/ef9d939fe4a812648c7a712610b2ca6140b0853c5efea361301006c02ae5/numpy-2.4.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:a73044b752f5d34d4232f25f18160a1cc418ea4507f5f11e299d8ac36875f8a0", size = 5327274, upload-time = "2026-01-10T06:44:23.189Z" },
{ url = "https://files.pythonhosted.org/packages/bd/31/d381368e2a95c3b08b8cf7faac6004849e960f4a042d920337f71cef0cae/numpy-2.4.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:fb1461c99de4d040666ca0444057b06541e5642f800b71c56e6ea92d6a853a0c", size = 6648306, upload-time = "2026-01-10T06:44:25.012Z" },
{ url = "https://files.pythonhosted.org/packages/c8/e5/0989b44ade47430be6323d05c23207636d67d7362a1796ccbccac6773dd2/numpy-2.4.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423797bdab2eeefbe608d7c1ec7b2b4fd3c58d51460f1ee26c7500a1d9c9ee93", size = 14464653, upload-time = "2026-01-10T06:44:26.706Z" },
{ url = "https://files.pythonhosted.org/packages/10/a7/cfbe475c35371cae1358e61f20c5f075badc18c4797ab4354140e1d283cf/numpy-2.4.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52b5f61bdb323b566b528899cc7db2ba5d1015bda7ea811a8bcf3c89c331fa42", size = 16405144, upload-time = "2026-01-10T06:44:29.378Z" },
{ url = "https://files.pythonhosted.org/packages/f8/a3/0c63fe66b534888fa5177cc7cef061541064dbe2b4b60dcc60ffaf0d2157/numpy-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42d7dd5fa36d16d52a84f821eb96031836fd405ee6955dd732f2023724d0aa01", size = 16247425, upload-time = "2026-01-10T06:44:31.721Z" },
{ url = "https://files.pythonhosted.org/packages/6b/2b/55d980cfa2c93bd40ff4c290bf824d792bd41d2fe3487b07707559071760/numpy-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7b6b5e28bbd47b7532698e5db2fe1db693d84b58c254e4389d99a27bb9b8f6b", size = 18330053, upload-time = "2026-01-10T06:44:34.617Z" },
{ url = "https://files.pythonhosted.org/packages/23/12/8b5fc6b9c487a09a7957188e0943c9ff08432c65e34567cabc1623b03a51/numpy-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:5de60946f14ebe15e713a6f22850c2372fa72f4ff9a432ab44aa90edcadaa65a", size = 6152482, upload-time = "2026-01-10T06:44:36.798Z" },
{ url = "https://files.pythonhosted.org/packages/00/a5/9f8ca5856b8940492fc24fbe13c1bc34d65ddf4079097cf9e53164d094e1/numpy-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8f085da926c0d491ffff3096f91078cc97ea67e7e6b65e490bc8dcda65663be2", size = 12627117, upload-time = "2026-01-10T06:44:38.828Z" },
{ url = "https://files.pythonhosted.org/packages/ad/0d/eca3d962f9eef265f01a8e0d20085c6dd1f443cbffc11b6dede81fd82356/numpy-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:6436cffb4f2bf26c974344439439c95e152c9a527013f26b3577be6c2ca64295", size = 10667121, upload-time = "2026-01-10T06:44:41.644Z" },
{ url = "https://files.pythonhosted.org/packages/1e/48/d86f97919e79314a1cdee4c832178763e6e98e623e123d0bada19e92c15a/numpy-2.4.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8ad35f20be147a204e28b6a0575fbf3540c5e5f802634d4258d55b1ff5facce1", size = 16822202, upload-time = "2026-01-10T06:44:43.738Z" },
{ url = "https://files.pythonhosted.org/packages/51/e9/1e62a7f77e0f37dcfb0ad6a9744e65df00242b6ea37dfafb55debcbf5b55/numpy-2.4.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8097529164c0f3e32bb89412a0905d9100bf434d9692d9fc275e18dcf53c9344", size = 12569985, upload-time = "2026-01-10T06:44:45.945Z" },
{ url = "https://files.pythonhosted.org/packages/c7/7e/914d54f0c801342306fdcdce3e994a56476f1b818c46c47fc21ae968088c/numpy-2.4.1-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:ea66d2b41ca4a1630aae5507ee0a71647d3124d1741980138aa8f28f44dac36e", size = 5398484, upload-time = "2026-01-10T06:44:48.012Z" },
{ url = "https://files.pythonhosted.org/packages/1c/d8/9570b68584e293a33474e7b5a77ca404f1dcc655e40050a600dee81d27fb/numpy-2.4.1-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d3f8f0df9f4b8be57b3bf74a1d087fec68f927a2fab68231fdb442bf2c12e426", size = 6713216, upload-time = "2026-01-10T06:44:49.725Z" },
{ url = "https://files.pythonhosted.org/packages/33/9b/9dd6e2db8d49eb24f86acaaa5258e5f4c8ed38209a4ee9de2d1a0ca25045/numpy-2.4.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2023ef86243690c2791fd6353e5b4848eedaa88ca8a2d129f462049f6d484696", size = 14538937, upload-time = "2026-01-10T06:44:51.498Z" },
{ url = "https://files.pythonhosted.org/packages/53/87/d5bd995b0f798a37105b876350d346eea5838bd8f77ea3d7a48392f3812b/numpy-2.4.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8361ea4220d763e54cff2fbe7d8c93526b744f7cd9ddab47afeff7e14e8503be", size = 16479830, upload-time = "2026-01-10T06:44:53.931Z" },
{ url = "https://files.pythonhosted.org/packages/5b/c7/b801bf98514b6ae6475e941ac05c58e6411dd863ea92916bfd6d510b08c1/numpy-2.4.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:4f1b68ff47680c2925f8063402a693ede215f0257f02596b1318ecdfb1d79e33", size = 12492579, upload-time = "2026-01-10T06:44:57.094Z" },
]

[[package]]
name = "packaging"
version = "26.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
]

[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]

[[package]]
name = "pydantic"
version = "2.12.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
{ name = "pydantic-core" },
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
]

[[package]]
name = "pydantic-core"
version = "2.41.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" },
{ url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" },
{ url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" },
{ url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" },
{ url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" },
{ url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" },
{ url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" },
{ url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" },
{ url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" },
{ url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" },
{ url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" },
{ url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" },
{ url = "https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" },
{ url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" },
{ url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" },
{ url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" },
{ url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" },
{ url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" },
{ url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" },
{ url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" },
{ url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" },
{ url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" },
{ url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" },
{ url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" },
{ url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" },
{ url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" },
{ url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" },
{ url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
{ url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
{ url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
{ url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
{ url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
{ url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
{ url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
{ url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
{ url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
{ url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
{ url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
{ url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
{ url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
{ url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
{ url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
{ url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
{ url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
{ url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
{ url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
{ url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
{ url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
{ url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
{ url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
{ url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
{ url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
{ url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
{ url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
{ url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
{ url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
{ url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
{ url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
{ url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
{ url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
{ url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
{ url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
{ url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
{ url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
{ url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
{ url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
{ url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
{ url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
{ url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
{ url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
{ url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
{ url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
{ url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
{ url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
{ url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
{ url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
{ url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
{ url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
{ url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
{ url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
{ url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
{ url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
{ url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" },
{ url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" },
{ url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" },
{ url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" },
{ url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
{ url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
{ url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
{ url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
{ url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" },
{ url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" },
{ url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" },
{ url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" },
{ url = "https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" },
{ url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" },
{ url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" },
{ url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" },
{ url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" },
{ url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" },
{ url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" },
{ url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" },
{ url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" },
{ url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" },
{ url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" },
{ url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" },
]

[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]

[[package]]
name = "pymysql"
version = "1.1.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f5/ae/1fe3fcd9f959efa0ebe200b8de88b5a5ce3e767e38c7ac32fb179f16a388/pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03", size = 48258, upload-time = "2025-08-24T12:55:55.146Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9", size = 45300, upload-time = "2025-08-24T12:55:53.394Z" },
]

[[package]]
name = "pyobvector"
version = "0.2.23"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiomysql" },
{ name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
{ name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
{ name = "pydantic" },
{ name = "pymysql" },
{ name = "sqlalchemy" },
{ name = "sqlglot" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7f/14/ea82e5f70c335d2a253ae0a5f182f99abc0319511d565ec887c1d576cfb4/pyobvector-0.2.23.tar.gz", hash = "sha256:c575c84d7aef078d19f7ceeccb7240ea7371940e4e240214ed013b757fbe2b97", size = 73663, upload-time = "2026-01-29T09:29:37.197Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4f/45/29100150b64ec6c2361f11da969bf0a25f33408bae1eba0054abe315922d/pyobvector-0.2.23-py3-none-any.whl", hash = "sha256:04973247f843cbfef548b9d07989190ffc64a56d49c88bf60b3220f0841b33d3", size = 60900, upload-time = "2026-01-29T09:29:35.727Z" },
]

[[package]]
name = "pytest"
version = "9.0.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
{ name = "tomli", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
]

[[package]]
name = "pytest-asyncio"
version = "1.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" },
{ name = "pytest" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
]

[[package]]
name = "pytest-cov"
version = "7.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage", extra = ["toml"] },
{ name = "pluggy" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
]

[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
]

[[package]]
name = "rich"
version = "14.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markdown-it-py" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/84/4831f881aa6ff3c976f6d6809b58cdfa350593ffc0dc3c58f5f6586780fb/rich-14.3.1.tar.gz", hash = "sha256:b8c5f568a3a749f9290ec6bddedf835cec33696bfc1e48bcfecb276c7386e4b8", size = 230125, upload-time = "2026-01-24T21:40:44.847Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/87/2a/a1810c8627b9ec8c57ec5ec325d306701ae7be50235e8fd81266e002a3cc/rich-14.3.1-py3-none-any.whl", hash = "sha256:da750b1aebbff0b372557426fb3f35ba56de8ef954b3190315eb64076d6fb54e", size = 309952, upload-time = "2026-01-24T21:40:42.969Z" },
]

[[package]]
name = "six"
version = "1.17.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]

[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]

[[package]]
name = "sqlalchemy"
version = "2.0.46"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz", hash = "sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size = 9865393, upload-time = "2026-01-21T18:03:45.119Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/40/26/66ba59328dc25e523bfcb0f8db48bdebe2035e0159d600e1f01c0fc93967/sqlalchemy-2.0.46-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:895296687ad06dc9b11a024cf68e8d9d3943aa0b4964278d2553b86f1b267735", size = 2155051, upload-time = "2026-01-21T18:27:28.965Z" },
{ url = "https://files.pythonhosted.org/packages/21/cd/9336732941df972fbbfa394db9caa8bb0cf9fe03656ec728d12e9cbd6edc/sqlalchemy-2.0.46-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab65cb2885a9f80f979b85aa4e9c9165a31381ca322cbde7c638fe6eefd1ec39", size = 3234666, upload-time = "2026-01-21T18:32:28.72Z" },
{ url = "https://files.pythonhosted.org/packages/38/62/865ae8b739930ec433cd4123760bee7f8dafdc10abefd725a025604fb0de/sqlalchemy-2.0.46-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52fe29b3817bd191cc20bad564237c808967972c97fa683c04b28ec8979ae36f", size = 3232917, upload-time = "2026-01-21T18:44:54.064Z" },
{ url = "https://files.pythonhosted.org/packages/24/38/805904b911857f2b5e00fdea44e9570df62110f834378706939825579296/sqlalchemy-2.0.46-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:09168817d6c19954d3b7655da6ba87fcb3a62bb575fb396a81a8b6a9fadfe8b5", size = 3185790, upload-time = "2026-01-21T18:32:30.581Z" },
{ url = "https://files.pythonhosted.org/packages/69/4f/3260bb53aabd2d274856337456ea52f6a7eccf6cce208e558f870cec766b/sqlalchemy-2.0.46-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:be6c0466b4c25b44c5d82b0426b5501de3c424d7a3220e86cd32f319ba56798e", size = 3207206, upload-time = "2026-01-21T18:44:55.93Z" },
{ url = "https://files.pythonhosted.org/packages/ce/b3/67c432d7f9d88bb1a61909b67e29f6354d59186c168fb5d381cf438d3b73/sqlalchemy-2.0.46-cp310-cp310-win32.whl", hash = "sha256:1bc3f601f0a818d27bfe139f6766487d9c88502062a2cd3a7ee6c342e81d5047", size = 2115296, upload-time = "2026-01-21T18:33:12.498Z" },
{ url = "https://files.pythonhosted.org/packages/4a/8c/25fb284f570f9d48e6c240f0269a50cec9cf009a7e08be4c0aaaf0654972/sqlalchemy-2.0.46-cp310-cp310-win_amd64.whl", hash = "sha256:e0c05aff5c6b1bb5fb46a87e0f9d2f733f83ef6cbbbcd5c642b6c01678268061", size = 2138540, upload-time = "2026-01-21T18:33:14.22Z" },
{ url = "https://files.pythonhosted.org/packages/69/ac/b42ad16800d0885105b59380ad69aad0cce5a65276e269ce2729a2343b6a/sqlalchemy-2.0.46-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:261c4b1f101b4a411154f1da2b76497d73abbfc42740029205d4d01fa1052684", size = 2154851, upload-time = "2026-01-21T18:27:30.54Z" },
{ url = "https://files.pythonhosted.org/packages/a0/60/d8710068cb79f64d002ebed62a7263c00c8fd95f4ebd4b5be8f7ca93f2bc/sqlalchemy-2.0.46-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:181903fe8c1b9082995325f1b2e84ac078b1189e2819380c2303a5f90e114a62", size = 3311241, upload-time = "2026-01-21T18:32:33.45Z" },
{ url = "https://files.pythonhosted.org/packages/2b/0f/20c71487c7219ab3aa7421c7c62d93824c97c1460f2e8bb72404b0192d13/sqlalchemy-2.0.46-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:590be24e20e2424a4c3c1b0835e9405fa3d0af5823a1a9fc02e5dff56471515f", size = 3310741, upload-time = "2026-01-21T18:44:57.887Z" },
{ url = "https://files.pythonhosted.org/packages/65/80/d26d00b3b249ae000eee4db206fcfc564bf6ca5030e4747adf451f4b5108/sqlalchemy-2.0.46-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7568fe771f974abadce52669ef3a03150ff03186d8eb82613bc8adc435a03f01", size = 3263116, upload-time = "2026-01-21T18:32:35.044Z" },
{ url = "https://files.pythonhosted.org/packages/da/ee/74dda7506640923821340541e8e45bd3edd8df78664f1f2e0aae8077192b/sqlalchemy-2.0.46-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf7e1e78af38047e08836d33502c7a278915698b7c2145d045f780201679999", size = 3285327, upload-time = "2026-01-21T18:44:59.254Z" },
{ url = "https://files.pythonhosted.org/packages/9f/25/6dcf8abafff1389a21c7185364de145107b7394ecdcb05233815b236330d/sqlalchemy-2.0.46-cp311-cp311-win32.whl", hash = "sha256:9d80ea2ac519c364a7286e8d765d6cd08648f5b21ca855a8017d9871f075542d", size = 2114564, upload-time = "2026-01-21T18:33:15.85Z" },
{ url = "https://files.pythonhosted.org/packages/93/5f/e081490f8523adc0088f777e4ebad3cac21e498ec8a3d4067074e21447a1/sqlalchemy-2.0.46-cp311-cp311-win_amd64.whl", hash = "sha256:585af6afe518732d9ccd3aea33af2edaae4a7aa881af5d8f6f4fe3a368699597", size = 2139233, upload-time = "2026-01-21T18:33:17.528Z" },
{ url = "https://files.pythonhosted.org/packages/b6/35/d16bfa235c8b7caba3730bba43e20b1e376d2224f407c178fbf59559f23e/sqlalchemy-2.0.46-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a9a72b0da8387f15d5810f1facca8f879de9b85af8c645138cba61ea147968c", size = 2153405, upload-time = "2026-01-21T19:05:54.143Z" },
{ url = "https://files.pythonhosted.org/packages/06/6c/3192e24486749862f495ddc6584ed730c0c994a67550ec395d872a2ad650/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2347c3f0efc4de367ba00218e0ae5c4ba2306e47216ef80d6e31761ac97cb0b9", size = 3334702, upload-time = "2026-01-21T18:46:45.384Z" },
{ url = "https://files.pythonhosted.org/packages/ea/a2/b9f33c8d68a3747d972a0bb758c6b63691f8fb8a49014bc3379ba15d4274/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9094c8b3197db12aa6f05c51c05daaad0a92b8c9af5388569847b03b1007fb1b", size = 3347664, upload-time = "2026-01-21T18:40:09.979Z" },
{ url = "https://files.pythonhosted.org/packages/aa/d2/3e59e2a91eaec9db7e8dc6b37b91489b5caeb054f670f32c95bcba98940f/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37fee2164cf21417478b6a906adc1a91d69ae9aba8f9533e67ce882f4bb1de53", size = 3277372, upload-time = "2026-01-21T18:46:47.168Z" },
{ url = "https://files.pythonhosted.org/packages/dd/dd/67bc2e368b524e2192c3927b423798deda72c003e73a1e94c21e74b20a85/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b1e14b2f6965a685c7128bd315e27387205429c2e339eeec55cb75ca4ab0ea2e", size = 3312425, upload-time = "2026-01-21T18:40:11.548Z" },
{ url = "https://files.pythonhosted.org/packages/43/82/0ecd68e172bfe62247e96cb47867c2d68752566811a4e8c9d8f6e7c38a65/sqlalchemy-2.0.46-cp312-cp312-win32.whl", hash = "sha256:412f26bb4ba942d52016edc8d12fb15d91d3cd46b0047ba46e424213ad407bcb", size = 2113155, upload-time = "2026-01-21T18:42:49.748Z" },
{ url = "https://files.pythonhosted.org/packages/bc/2a/2821a45742073fc0331dc132552b30de68ba9563230853437cac54b2b53e/sqlalchemy-2.0.46-cp312-cp312-win_amd64.whl", hash = "sha256:ea3cd46b6713a10216323cda3333514944e510aa691c945334713fca6b5279ff", size = 2140078, upload-time = "2026-01-21T18:42:51.197Z" },
{ url = "https://files.pythonhosted.org/packages/b3/4b/fa7838fe20bb752810feed60e45625a9a8b0102c0c09971e2d1d95362992/sqlalchemy-2.0.46-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93a12da97cca70cea10d4b4fc602589c4511f96c1f8f6c11817620c021d21d00", size = 2150268, upload-time = "2026-01-21T19:05:56.621Z" },
{ url = "https://files.pythonhosted.org/packages/46/c1/b34dccd712e8ea846edf396e00973dda82d598cb93762e55e43e6835eba9/sqlalchemy-2.0.46-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af865c18752d416798dae13f83f38927c52f085c52e2f32b8ab0fef46fdd02c2", size = 3276511, upload-time = "2026-01-21T18:46:49.022Z" },
{ url = "https://files.pythonhosted.org/packages/96/48/a04d9c94753e5d5d096c628c82a98c4793b9c08ca0e7155c3eb7d7db9f24/sqlalchemy-2.0.46-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8d679b5f318423eacb61f933a9a0f75535bfca7056daeadbf6bd5bcee6183aee", size = 3292881, upload-time = "2026-01-21T18:40:13.089Z" },
{ url = "https://files.pythonhosted.org/packages/be/f4/06eda6e91476f90a7d8058f74311cb65a2fb68d988171aced81707189131/sqlalchemy-2.0.46-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64901e08c33462acc9ec3bad27fc7a5c2b6491665f2aa57564e57a4f5d7c52ad", size = 3224559, upload-time = "2026-01-21T18:46:50.974Z" },
{ url = "https://files.pythonhosted.org/packages/ab/a2/d2af04095412ca6345ac22b33b89fe8d6f32a481e613ffcb2377d931d8d0/sqlalchemy-2.0.46-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8ac45e8f4eaac0f9f8043ea0e224158855c6a4329fd4ee37c45c61e3beb518e", size = 3262728, upload-time = "2026-01-21T18:40:14.883Z" },
{ url = "https://files.pythonhosted.org/packages/31/48/1980c7caa5978a3b8225b4d230e69a2a6538a3562b8b31cea679b6933c83/sqlalchemy-2.0.46-cp313-cp313-win32.whl", hash = "sha256:8d3b44b3d0ab2f1319d71d9863d76eeb46766f8cf9e921ac293511804d39813f", size = 2111295, upload-time = "2026-01-21T18:42:52.366Z" },
{ url = "https://files.pythonhosted.org/packages/2d/54/f8d65bbde3d877617c4720f3c9f60e99bb7266df0d5d78b6e25e7c149f35/sqlalchemy-2.0.46-cp313-cp313-win_amd64.whl", hash = "sha256:77f8071d8fbcbb2dd11b7fd40dedd04e8ebe2eb80497916efedba844298065ef", size = 2137076, upload-time = "2026-01-21T18:42:53.924Z" },
{ url = "https://files.pythonhosted.org/packages/56/ba/9be4f97c7eb2b9d5544f2624adfc2853e796ed51d2bb8aec90bc94b7137e/sqlalchemy-2.0.46-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1e8cc6cc01da346dc92d9509a63033b9b1bda4fed7a7a7807ed385c7dccdc10", size = 3556533, upload-time = "2026-01-21T18:33:06.636Z" },
{ url = "https://files.pythonhosted.org/packages/20/a6/b1fc6634564dbb4415b7ed6419cdfeaadefd2c39cdab1e3aa07a5f2474c2/sqlalchemy-2.0.46-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96c7cca1a4babaaf3bfff3e4e606e38578856917e52f0384635a95b226c87764", size = 3523208, upload-time = "2026-01-21T18:45:08.436Z" },
{ url = "https://files.pythonhosted.org/packages/a1/d8/41e0bdfc0f930ff236f86fccd12962d8fa03713f17ed57332d38af6a3782/sqlalchemy-2.0.46-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2a9f9aee38039cf4755891a1e50e1effcc42ea6ba053743f452c372c3152b1b", size = 3464292, upload-time = "2026-01-21T18:33:08.208Z" },
{ url = "https://files.pythonhosted.org/packages/f0/8b/9dcbec62d95bea85f5ecad9b8d65b78cc30fb0ffceeb3597961f3712549b/sqlalchemy-2.0.46-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:db23b1bf8cfe1f7fda19018e7207b20cdb5168f83c437ff7e95d19e39289c447", size = 3473497, upload-time = "2026-01-21T18:45:10.552Z" },
{ url = "https://files.pythonhosted.org/packages/e9/f8/5ecdfc73383ec496de038ed1614de9e740a82db9ad67e6e4514ebc0708a3/sqlalchemy-2.0.46-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:56bdd261bfd0895452006d5316cbf35739c53b9bb71a170a331fa0ea560b2ada", size = 2152079, upload-time = "2026-01-21T19:05:58.477Z" },
{ url = "https://files.pythonhosted.org/packages/e5/bf/eba3036be7663ce4d9c050bc3d63794dc29fbe01691f2bf5ccb64e048d20/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33e462154edb9493f6c3ad2125931e273bbd0be8ae53f3ecd1c161ea9a1dd366", size = 3272216, upload-time = "2026-01-21T18:46:52.634Z" },
{ url = "https://files.pythonhosted.org/packages/05/45/1256fb597bb83b58a01ddb600c59fe6fdf0e5afe333f0456ed75c0f8d7bd/sqlalchemy-2.0.46-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bcdce05f056622a632f1d44bb47dbdb677f58cad393612280406ce37530eb6d", size = 3277208, upload-time = "2026-01-21T18:40:16.38Z" },
{ url = "https://files.pythonhosted.org/packages/d9/a0/2053b39e4e63b5d7ceb3372cface0859a067c1ddbd575ea7e9985716f771/sqlalchemy-2.0.46-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e84b09a9b0f19accedcbeff5c2caf36e0dd537341a33aad8d680336152dc34e", size = 3221994, upload-time = "2026-01-21T18:46:54.622Z" },
{ url = "https://files.pythonhosted.org/packages/1e/87/97713497d9502553c68f105a1cb62786ba1ee91dea3852ae4067ed956a50/sqlalchemy-2.0.46-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4f52f7291a92381e9b4de9050b0a65ce5d6a763333406861e33906b8aa4906bf", size = 3243990, upload-time = "2026-01-21T18:40:18.253Z" },
{ url = "https://files.pythonhosted.org/packages/a8/87/5d1b23548f420ff823c236f8bea36b1a997250fd2f892e44a3838ca424f4/sqlalchemy-2.0.46-cp314-cp314-win32.whl", hash = "sha256:70ed2830b169a9960193f4d4322d22be5c0925357d82cbf485b3369893350908", size = 2114215, upload-time = "2026-01-21T18:42:55.232Z" },
{ url = "https://files.pythonhosted.org/packages/3a/20/555f39cbcf0c10cf452988b6a93c2a12495035f68b3dbd1a408531049d31/sqlalchemy-2.0.46-cp314-cp314-win_amd64.whl", hash = "sha256:3c32e993bc57be6d177f7d5d31edb93f30726d798ad86ff9066d75d9bf2e0b6b", size = 2139867, upload-time = "2026-01-21T18:42:56.474Z" },
{ url = "https://files.pythonhosted.org/packages/3e/f0/f96c8057c982d9d8a7a68f45d69c674bc6f78cad401099692fe16521640a/sqlalchemy-2.0.46-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4dafb537740eef640c4d6a7c254611dca2df87eaf6d14d6a5fca9d1f4c3fc0fa", size = 3561202, upload-time = "2026-01-21T18:33:10.337Z" },
{ url = "https://files.pythonhosted.org/packages/d7/53/3b37dda0a5b137f21ef608d8dfc77b08477bab0fe2ac9d3e0a66eaeab6fc/sqlalchemy-2.0.46-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42a1643dc5427b69aca967dae540a90b0fbf57eaf248f13a90ea5930e0966863", size = 3526296, upload-time = "2026-01-21T18:45:12.657Z" },
{ url = "https://files.pythonhosted.org/packages/33/75/f28622ba6dde79cd545055ea7bd4062dc934e0621f7b3be2891f8563f8de/sqlalchemy-2.0.46-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ff33c6e6ad006bbc0f34f5faf941cfc62c45841c64c0a058ac38c799f15b5ede", size = 3470008, upload-time = "2026-01-21T18:33:11.725Z" },
{ url = "https://files.pythonhosted.org/packages/a9/42/4afecbbc38d5e99b18acef446453c76eec6fbd03db0a457a12a056836e22/sqlalchemy-2.0.46-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:82ec52100ec1e6ec671563bbd02d7c7c8d0b9e71a0723c72f22ecf52d1755330", size = 3476137, upload-time = "2026-01-21T18:45:15.001Z" },
{ url = "https://files.pythonhosted.org/packages/fc/a1/9c4efa03300926601c19c18582531b45aededfb961ab3c3585f1e24f120b/sqlalchemy-2.0.46-py3-none-any.whl", hash = "sha256:f9c11766e7e7c0a2767dda5acb006a118640c9fc0a4104214b96269bfb78399e", size = 1937882, upload-time = "2026-01-21T18:22:10.456Z" },
]
[[package]]
name = "sqlglot"
version = "28.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0a/3d/aec874eb15ed31d73244aa13c8bbb395de90980bc281539f63f1a3537fd0/sqlglot-28.7.0.tar.gz", hash = "sha256:125f8d41721543e8a503bbe08dbaa9a7ce11bf6b96c052fcb819bea8ca5e3b7e", size = 5717197, upload-time = "2026-01-30T12:47:35.772Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/e9/6251e703f7314de9508c1bcf9e8cfa5d603bebd6d96428467ef6d81539ce/sqlglot-28.7.0-py3-none-any.whl", hash = "sha256:cb1c5cb85fa9b8b49738959859590ed22d095d4f65aa1f60c3a0d2b254984569", size = 595253, upload-time = "2026-01-30T12:47:34.018Z" },
]
[[package]]
name = "tomli"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" },
{ url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" },
{ url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" },
{ url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" },
{ url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" },
{ url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" },
{ url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" },
{ url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" },
{ url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" },
{ url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" },
{ url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" },
{ url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" },
{ url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" },
{ url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" },
{ url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" },
{ url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" },
{ url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" },
{ url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" },
{ url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" },
{ url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" },
{ url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" },
{ url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" },
{ url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" },
{ url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" },
{ url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" },
{ url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" },
{ url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" },
{ url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" },
{ url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" },
{ url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" },
{ url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" },
{ url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" },
{ url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" },
{ url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" },
{ url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" },
{ url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" },
{ url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" },
{ url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" },
{ url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" },
{ url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" },
{ url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" },
{ url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" },
{ url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" },
{ url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" },
{ url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" },
{ url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" },
]
[[package]]
name = "tqdm"
version = "4.67.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/27/89/4b0001b2dab8df0a5ee2787dcbe771de75ded01f18f1f8d53dedeea2882b/tqdm-4.67.2.tar.gz", hash = "sha256:649aac53964b2cb8dec76a14b405a4c0d13612cb8933aae547dd144eacc99653", size = 169514, upload-time = "2026-01-30T23:12:06.555Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f5/e2/31eac96de2915cf20ccaed0225035db149dfb9165a9ed28d4b252ef3f7f7/tqdm-4.67.2-py3-none-any.whl", hash = "sha256:9a12abcbbff58b6036b2167d9d3853042b9d436fe7330f06ae047867f2f8e0a7", size = 78354, upload-time = "2026-01-30T23:12:04.368Z" },
]
[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]
[[package]]
name = "typing-inspection"
version = "0.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
]
[[package]]
name = "urllib3"
version = "2.6.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
]