From 40e5e281f9fbc0945ae402d9b3749c94f3ae666b Mon Sep 17 00:00:00 2001 From: Luke Hinds Date: Mon, 13 Jan 2025 12:18:05 +0000 Subject: [PATCH 01/11] Log User-agents from clients Had this branch hanging around and after chat with Jakub we figured it's worth having for when we need to figure out client providers --- src/codegate/providers/copilot/provider.py | 2 ++ src/codegate/server.py | 11 ++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/codegate/providers/copilot/provider.py b/src/codegate/providers/copilot/provider.py index f8bfeb1e..fd3c9971 100644 --- a/src/codegate/providers/copilot/provider.py +++ b/src/codegate/providers/copilot/provider.py @@ -236,6 +236,8 @@ def get_headers_dict(self) -> Dict[str, str]: try: name, value = header.decode("utf-8").split(":", 1) headers_dict[name.strip().lower()] = value.strip() + if name == "user-agent": + logger.debug(f"User-Agent header received: {value} from {self.peername}") except ValueError: continue diff --git a/src/codegate/server.py b/src/codegate/server.py index 57206712..88e14ea7 100644 --- a/src/codegate/server.py +++ b/src/codegate/server.py @@ -1,7 +1,7 @@ import traceback import structlog -from fastapi import APIRouter, FastAPI +from fastapi import APIRouter, FastAPI, Request from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import JSONResponse from starlette.middleware.errors import ServerErrorMiddleware @@ -35,6 +35,15 @@ def init_app(pipeline_factory: PipelineFactory) -> FastAPI: description=__description__, version=__version__, ) + + @app.middleware("http") + async def log_user_agent(request: Request, call_next): + user_agent = request.headers.get("user-agent") + client_host = request.client.host if request.client else "unknown" + logger.debug(f"User-Agent header received: {user_agent} from {client_host}") + response = await call_next(request) + return response + app.add_middleware( CORSMiddleware, allow_origins=["*"], From 
c06e870ff85073a7c06b803d9494fe7ba0007b92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2025 04:58:01 +0000 Subject: [PATCH 02/11] Bump ollama from 0.4.5 to 0.4.6 Bumps [ollama](https://github.com/ollama/ollama-python) from 0.4.5 to 0.4.6. - [Release notes](https://github.com/ollama/ollama-python/releases) - [Commits](https://github.com/ollama/ollama-python/compare/v0.4.5...v0.4.6) --- updated-dependencies: - dependency-name: ollama dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index dec3e2a6..b8cd8fab 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1525,13 +1525,13 @@ files = [ [[package]] name = "ollama" -version = "0.4.5" +version = "0.4.6" description = "The official Python client for Ollama." optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "ollama-0.4.5-py3-none-any.whl", hash = "sha256:74936de89a41c87c9745f09f2e1db964b4783002188ac21241bfab747f46d925"}, - {file = "ollama-0.4.5.tar.gz", hash = "sha256:e7fb71a99147046d028ab8b75e51e09437099aea6f8f9a0d91a71f787e97439e"}, + {file = "ollama-0.4.6-py3-none-any.whl", hash = "sha256:cbb4ebe009e10dd12bdd82508ab415fd131945e185753d728a7747c9ebe762e9"}, + {file = "ollama-0.4.6.tar.gz", hash = "sha256:b00717651c829f96094ed4231b9f0d87e33cc92dc235aca50aeb5a2a4e6e95b7"}, ] [package.dependencies] From d06f04c19587501442d66d519efe8e0470934680 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2025 04:58:24 +0000 Subject: [PATCH 03/11] Bump litellm from 1.57.10 to 1.58.0 Bumps [litellm](https://github.com/BerriAI/litellm) from 1.57.10 to 1.58.0. 
- [Release notes](https://github.com/BerriAI/litellm/releases) - [Commits](https://github.com/BerriAI/litellm/compare/v1.57.10...v1.58.0) --- updated-dependencies: - dependency-name: litellm dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index dec3e2a6..af43f9f7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1194,13 +1194,13 @@ referencing = ">=0.31.0" [[package]] name = "litellm" -version = "1.57.10" +version = "1.58.0" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.57.10-py3-none-any.whl", hash = "sha256:68c37378a9b5d892c5310512b99b9d261c6e0b95fa7200b50945742344814117"}, - {file = "litellm-1.57.10.tar.gz", hash = "sha256:eeb7dd6b228a0124e8e4d445de839af2fb0399fb4207a05a11a3a828d35aeeda"}, + {file = "litellm-1.58.0-py3-none-any.whl", hash = "sha256:1fc07646f6419f1d7b7d06fe2f5c72b3e6e3407423b50cbf45b58dc9740e7a03"}, + {file = "litellm-1.58.0.tar.gz", hash = "sha256:db4512e987809e04e59d5b4240bdef3a4bfe575c332397f80cc6b4403c68120a"}, ] [package.dependencies] @@ -3056,4 +3056,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.12,<4.0" -content-hash = "2dffc7b22c74e6dd3c01c153aa6fcf18b7a0fe989eae0cf409ed43d182b706e9" +content-hash = "68009f4e7fd6e672f2ba98cfb744a6bb4469f6d05b0266b817eef2ed4cf9aa18" diff --git a/pyproject.toml b/pyproject.toml index d719f20c..b520c9f5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,7 @@ PyYAML = ">=6.0.1" fastapi = ">=0.115.5" uvicorn = ">=0.32.1" structlog = ">=24.4.0" -litellm = "^1.57.10" +litellm = "^1.58.0" llama_cpp_python = ">=0.3.2" cryptography = "^44.0.0" sqlalchemy = "^2.0.37" From 
07919502ff8aedcdb16e2ed9a4b98a0154649d07 Mon Sep 17 00:00:00 2001 From: Alejandro Ponce Date: Tue, 14 Jan 2025 10:34:08 +0200 Subject: [PATCH 04/11] Add build-arg in `make image-build` command The build-arg prevents that the step for downloading the UI code is cached. This way we can ensure that we are always using the latest release from the UI when building the image locally. The build-arg is already present in the GH action to build and publish the image. Additionally, remove the `secret` provided to the GH action to build the image. It was used when the containers were not public --- .github/workflows/image-build.yml | 2 -- .github/workflows/image-publish.yml | 2 -- Makefile | 10 ++++++++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/workflows/image-build.yml b/.github/workflows/image-build.yml index 1b3b22eb..1f826796 100644 --- a/.github/workflows/image-build.yml +++ b/.github/workflows/image-build.yml @@ -35,7 +35,5 @@ jobs: load: true cache-from: type=gha cache-to: type=gha,mode=max - secrets: | - gh_token=${{ secrets.GH_CI_TOKEN }} build-args: | LATEST_RELEASE=${{ env.LATEST_RELEASE }} diff --git a/.github/workflows/image-publish.yml b/.github/workflows/image-publish.yml index 3a96db13..c51666e3 100644 --- a/.github/workflows/image-publish.yml +++ b/.github/workflows/image-publish.yml @@ -85,8 +85,6 @@ jobs: labels: ${{ steps.docker-metadata.outputs.labels }} cache-from: type=gha cache-to: type=gha,mode=max - secrets: | - gh_token=${{ secrets.GH_CI_TOKEN }} build-args: | LATEST_RELEASE=${{ env.LATEST_RELEASE }} - name: Capture Image Digest diff --git a/Makefile b/Makefile index a6bae65e..6ab6bb34 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ .PHONY: clean install format lint test security build all CONTAINER_BUILD?=docker buildx build -VER?=0.1.0 +VER?=0.1.7 clean: rm -rf build/ @@ -30,6 +30,12 @@ build: clean test poetry build image-build: - DOCKER_BUILDKIT=1 $(CONTAINER_BUILD) -f Dockerfile --secret 
id=gh_token,env=GH_CI_TOKEN -t codegate . -t ghcr.io/stacklok/codegate:$(VER) --load + DOCKER_BUILDKIT=1 $(CONTAINER_BUILD) \ + -f Dockerfile \ + --build-arg LATEST_RELEASE=$(curl -s "https://api.github.com/repos/stacklok/codegate-ui/releases/latest" | grep '"zipball_url":' | cut -d '"' -f 4) \ + -t codegate \ + . \ + -t ghcr.io/stacklok/codegate:$(VER) \ + --load all: clean install format lint test security build From 20633f17a8122ae8cedb86f91cb1eedc4559ae06 Mon Sep 17 00:00:00 2001 From: Alejandro Ponce Date: Tue, 14 Jan 2025 12:51:30 +0200 Subject: [PATCH 05/11] fix: Do not double count secrets on `/explain` copilot function Closes: #519 When using `/explain` function in copilot we were double counting the secrets. The problem was that we were getting several `user` messages after the last `assistant` message. We are using the last `assistant` message as means to identify the user messages. Here is an example of how the request looked like for `/explain`: ``` [ {"role": "assistant", "content": "some content"}, {"role": "user", "content": content_with_secrets}, {"role": "user", "content": content_with_secrets}, ] ``` To avoid double counting now we check which were the secrets that matched after the last `assistant` message and only consider the unique ones. 
--- src/codegate/pipeline/secrets/secrets.py | 26 ++++++++++++------------ tests/pipeline/secrets/test_secrets.py | 12 +++++------ 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/src/codegate/pipeline/secrets/secrets.py b/src/codegate/pipeline/secrets/secrets.py index 43ec17a8..d260f025 100644 --- a/src/codegate/pipeline/secrets/secrets.py +++ b/src/codegate/pipeline/secrets/secrets.py @@ -106,10 +106,10 @@ def _get_surrounding_secret_lines( end_line = min(secret_line + surrounding_lines, len(lines)) return "\n".join(lines[start_line:end_line]) - def obfuscate(self, text: str) -> tuple[str, int]: + def obfuscate(self, text: str) -> tuple[str, List[Match]]: matches = CodegateSignatures.find_in_string(text) if not matches: - return text, 0 + return text, [] logger.debug(f"Found {len(matches)} secrets in the user message") @@ -133,16 +133,16 @@ def obfuscate(self, text: str) -> tuple[str, int]: protected_text = list(text) # Store matches for logging - found_secrets = 0 + found_secrets = [] # First pass. Replace each match with its encrypted value - logger.info("\nFound secrets:") + logger.info(f"\nFound {len(absolute_matches)} secrets:") for start, end, match in absolute_matches: hidden_secret = self._hide_secret(match) # Replace the secret in the text protected_text[start:end] = hidden_secret - found_secrets += 1 + found_secrets.append(match) # Log the findings logger.info( f"\nService: {match.service}" @@ -228,7 +228,7 @@ def name(self) -> str: def _redact_text( self, text: str, secrets_manager: SecretsManager, session_id: str, context: PipelineContext - ) -> tuple[str, int]: + ) -> tuple[str, List[Match]]: """ Find and encrypt secrets in the given text. 
@@ -269,7 +269,7 @@ async def process( raise ValueError("Session ID not found in context") new_request = request.copy() - total_redacted = 0 + total_matches = [] # Process all messages last_assistant_idx = -1 @@ -281,15 +281,18 @@ async def process( for i, message in enumerate(new_request["messages"]): if "content" in message and message["content"]: # Protect the text - protected_string, redacted_count = self._redact_text( + protected_string, secrets_matched = self._redact_text( str(message["content"]), secrets_manager, session_id, context ) new_request["messages"][i]["content"] = protected_string - # Sum redacted count for messages after the last assistant message + # Append the matches for messages after the last assistant message if i > last_assistant_idx: - total_redacted += redacted_count + total_matches += secrets_matched + # Not count repeated secret matches + set_secrets_value = set(match.value for match in total_matches) + total_redacted = len(set_secrets_value) context.secrets_found = total_redacted > 0 logger.info(f"Total secrets redacted since last assistant message: {total_redacted}") @@ -362,7 +365,6 @@ async def process_chunk( if match: # Found a complete marker, process it encrypted_value = match.group(1) - print("----> encrypted_value: ", encrypted_value) original_value = input_context.sensitive.manager.get_original_value( encrypted_value, input_context.sensitive.session_id, @@ -371,8 +373,6 @@ async def process_chunk( if original_value is None: # If value not found, leave as is original_value = match.group(0) # Keep the REDACTED marker - else: - print("----> original_value: ", original_value) # Post an alert with the redacted content input_context.add_alert(self.name, trigger_string=encrypted_value) diff --git a/tests/pipeline/secrets/test_secrets.py b/tests/pipeline/secrets/test_secrets.py index 71b43f8f..29cc94c3 100644 --- a/tests/pipeline/secrets/test_secrets.py +++ b/tests/pipeline/secrets/test_secrets.py @@ -97,9 +97,9 @@ def 
test_hide_secret(self): def test_obfuscate(self): # Test text with a secret text = "API_KEY=AKIAIOSFODNN7EXAMPLE\nOther text" - protected, count = self.encryptor.obfuscate(text) + protected, matched_secrets = self.encryptor.obfuscate(text) - assert count == 1 + assert len(matched_secrets) == 1 assert "REDACTED<$" in protected assert "AKIAIOSFODNN7EXAMPLE" not in protected assert "Other text" in protected @@ -128,9 +128,9 @@ def test_hide_secret(self): def test_obfuscate(self): # Test text with multiple secrets text = "API_KEY=AKIAIOSFODNN7EXAMPLE\nPASSWORD=AKIAIOSFODNN7EXAMPLE" - protected, count = self.obfuscator.obfuscate(text) + protected, matched_secrets = self.obfuscator.obfuscate(text) - assert count == 2 + assert len(matched_secrets) == 2 assert "AKIAIOSFODNN7EXAMPLE" not in protected assert "*" * 32 in protected @@ -140,9 +140,9 @@ def test_obfuscate(self): def test_obfuscate_no_secrets(self): text = "Regular text without secrets" - protected, count = self.obfuscator.obfuscate(text) + protected, matched_secrets = self.obfuscator.obfuscate(text) - assert count == 0 + assert len(matched_secrets) == 0 assert protected == text From 70fbc6e1bef18b685e1f631b333d540b1a34abdd Mon Sep 17 00:00:00 2001 From: Michelangelo Mori <328978+blkt@users.noreply.github.com> Date: Tue, 14 Jan 2025 17:25:33 +0100 Subject: [PATCH 06/11] Fixed a couple typos. (#584) --- README.md | 2 +- scripts/entrypoint.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index a0ace02b..df5364ce 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ With Continue, you can choose from several leading AI model providers: 🔮 Many more on the way! 
-- **[Aider](https://aider.chat) +- **[Aider](https://aider.chat)** With Aider, you can choose from two leading AI model providers: diff --git a/scripts/entrypoint.sh b/scripts/entrypoint.sh index 22fbed2a..90515787 100755 --- a/scripts/entrypoint.sh +++ b/scripts/entrypoint.sh @@ -21,7 +21,7 @@ restore_backup() { fi } -genrerate_certs() { +generate_certs() { echo "Generating certificates..." python -m src.codegate.cli generate-certs --certs-out-dir "$CODEGATE_CERTS" } @@ -61,7 +61,7 @@ echo "Initializing entrypoint script..." restore_backup # Step 2: Generate certificates -genrerate_certs +generate_certs # Step 3: Start the dashboard start_dashboard From bcb26ab103babe31fdc9fe35e0bf7f7c361c5416 Mon Sep 17 00:00:00 2001 From: Juan Antonio Osorio Date: Wed, 15 Jan 2025 07:59:36 +0200 Subject: [PATCH 07/11] Add all-in-one minimal nginx config The nginx configuration uses a lot of defaults that work well in server-like environments. However, this is not optimized for a local dev-environment or single-container as codegate is meant to run. The nginx server spawns several processes which mostly remain idle. This is wasteful and it is taking over resources that could be used for something else. The intention of this new configuration is to minimize footprint. 
Signed-off-by: Juan Antonio Osorio --- Dockerfile | 2 +- nginx.conf | 78 +++++++++++++++++++++++++++++++++++++++++++----------- 2 files changed, 63 insertions(+), 17 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8983fc08..761cbdfa 100644 --- a/Dockerfile +++ b/Dockerfile @@ -78,7 +78,7 @@ RUN chown -R codegate /var/lib/nginx && \ chown -R codegate /var/log/nginx && \ chown -R codegate /run -COPY nginx.conf /etc/nginx/conf.d/default.conf +COPY nginx.conf /etc/nginx/nginx.conf # Remove include /etc/nginx/sites-enabled/*; from the default nginx.conf # This way we don't introduce unnecessary configurations nor serve diff --git a/nginx.conf b/nginx.conf index b46e56e9..87259672 100644 --- a/nginx.conf +++ b/nginx.conf @@ -1,22 +1,68 @@ -server { - listen 9090; +worker_processes 1; +pid /run/nginx.pid; +error_log /var/log/nginx/error.log; +include /etc/nginx/modules-enabled/*.conf; - server_name localhost; +events { + worker_connections 128; +} + +http { + + ## + # Basic Settings + ## + + sendfile on; + tcp_nopush on; + types_hash_max_size 2048; + + ## + # Disable unnecessary features + ## + + server_tokens off; + autoindex off; + + include /etc/nginx/mime.types; + default_type application/octet-stream; + + ## + # Logging Settings + ## + + access_log off; + error_log /var/log/nginx/error.log; + + ## + # Gzip Settings + ## + + gzip on; + + server { + listen 9090; + + server_name localhost; - root /var/www/html; - index index.html; + add_header X-Frame-Options SAMEORIGIN; + add_header X-Content-Type-Options nosniff; + add_header X-XSS-Protection "1; mode=block"; - location / { - try_files $uri /index.html =404; - } + root /var/www/html; + index index.html; - # Serve certificates from /app/codegate_volume/certs at /certificates - location /certificates/codegate_ca.crt { - alias /app/codegate_volume/certs/ca.crt; - types { application/x-x509-ca-cert crt; } - default_type application/x-x509-ca-cert; - } + location / { + try_files $uri /index.html =404; + expires 1h; 
# Cache files for 1 hour + add_header Cache-Control "public, max-age=3600"; + } - error_log /var/log/nginx/error.log; - access_log /var/log/nginx/access.log; + # Serve certificates from /app/codegate_volume/certs at /certificates + location /certificates/codegate_ca.crt { + alias /app/codegate_volume/certs/ca.crt; + types { application/x-x509-ca-cert crt; } + default_type application/x-x509-ca-cert; + } + } } From 9434f78760d8d98e79d93030f56ff5f861da00ca Mon Sep 17 00:00:00 2001 From: Juan Antonio Osorio Date: Wed, 15 Jan 2025 08:38:50 +0200 Subject: [PATCH 08/11] Use `latest` tag to build local codegate container There is no use in keeping this variable as it's only used for the container tag. Releases use another tagging mechanism. Signed-off-by: Juan Antonio Osorio --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 6ab6bb34..6db025f5 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,7 @@ .PHONY: clean install format lint test security build all CONTAINER_BUILD?=docker buildx build -VER?=0.1.7 +# This is the container tag. Only used for development purposes. +VER?=latest clean: rm -rf build/ From a01e202788fca0170fad7bc21143693779e5204e Mon Sep 17 00:00:00 2001 From: Juan Antonio Osorio Date: Wed, 15 Jan 2025 09:01:11 +0200 Subject: [PATCH 09/11] Get version dynamically as opposed to the hardcoded one we have now This leverages the package metadata instead of overwriting it with a hardcoded version and description. 
Signed-off-by: Juan Antonio Osorio --- src/codegate/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/codegate/__init__.py b/src/codegate/__init__.py index 7b8aeefe..106a1d9d 100644 --- a/src/codegate/__init__.py +++ b/src/codegate/__init__.py @@ -14,9 +14,6 @@ __version__ = "unknown" __description__ = "codegate" -__version__ = "0.1.7" -__description__ = "A configurable service gateway" - __all__ = ["Config", "ConfigurationError", "LogFormat", "LogLevel", "setup_logging"] # Set up null handler to avoid "No handler found" warnings. From dd482a21e89b33a7289a88658fe9047423c0c798 Mon Sep 17 00:00:00 2001 From: Alejandro Ponce Date: Wed, 15 Jan 2025 12:27:42 +0200 Subject: [PATCH 10/11] feat: Introduce DB migrations Related: #583 We had been using a single DB schema that didn't change until now. This introduces migrations using `alembic`. To create a new migration one can use: ```sh alembic revision -m "My migration" ``` That should generate an empty migration file that needs to be hand-filled. Specifically the `upgrade` method which will be the one executed when running the migration. 
```python """My migration Revision ID: Revises: Create Date: YYYY-MM-DD HH:MM:SS.XXXXXX """ from alembic import op import sqlalchemy as sa revision = '' down_revision = '' branch_labels = None depends_on = None def upgrade(): pass def downgrade(): pass ``` --- alembic.ini | 82 ++++++++++++++++++++ migrations/README | 1 + migrations/env.py | 72 ++++++++++++++++++ migrations/script.py.mako | 26 +++++++ migrations/versions/30d0144e1a50_init_db.py | 83 +++++++++++++++++++++ poetry.lock | 44 ++++++++++- pyproject.toml | 1 + sql/schema/schema.sql | 48 ------------ src/codegate/db/connection.py | 55 +++++--------- 9 files changed, 327 insertions(+), 85 deletions(-) create mode 100644 alembic.ini create mode 100644 migrations/README create mode 100644 migrations/env.py create mode 100644 migrations/script.py.mako create mode 100644 migrations/versions/30d0144e1a50_init_db.py delete mode 100644 sql/schema/schema.sql diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 00000000..5da2bb31 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,82 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. 
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = sqlite:///codegate_volume/db/codegate.db + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME diff --git a/migrations/README b/migrations/README new file mode 100644 index 00000000..98e4f9c4 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 00000000..dd2e9d0f --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,72 @@ +from alembic import context +from sqlalchemy import engine_from_config, pool + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +# if config.config_file_name is not None: +# fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+ + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + print(config.config_ini_section) + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 00000000..fbc4b07d --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/30d0144e1a50_init_db.py b/migrations/versions/30d0144e1a50_init_db.py new file mode 100644 index 00000000..009c84c7 --- /dev/null +++ b/migrations/versions/30d0144e1a50_init_db.py @@ -0,0 +1,83 @@ +"""init db + +Revision ID: 30d0144e1a50 +Revises: +Create Date: 2025-01-15 09:30:00.490697 + +""" + +from typing import Sequence, Union + +from alembic import op + +# revision identifiers, used by Alembic. +revision: str = "30d0144e1a50" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Schema for codegate database using SQLite + # Prompts table + op.execute( + """ + CREATE TABLE prompts ( + id TEXT PRIMARY KEY, -- UUID stored as TEXT + timestamp DATETIME NOT NULL, + provider TEXT, -- VARCHAR(255) + request TEXT NOT NULL, -- Record the full request that arrived to the server + type TEXT NOT NULL -- VARCHAR(50) (e.g. "fim", "chat") + ); + """ + ) + # Outputs table + op.execute( + """ + CREATE TABLE outputs ( + id TEXT PRIMARY KEY, -- UUID stored as TEXT + prompt_id TEXT NOT NULL, + timestamp DATETIME NOT NULL, + output TEXT NOT NULL, -- Record the full response. 
If stream will be a list of objects + FOREIGN KEY (prompt_id) REFERENCES prompts(id) + ); + """ + ) + # Alerts table + op.execute( + """ + CREATE TABLE alerts ( + id TEXT PRIMARY KEY, -- UUID stored as TEXT + prompt_id TEXT NOT NULL, + code_snippet TEXT, + trigger_string TEXT, -- VARCHAR(255) + trigger_type TEXT NOT NULL, -- VARCHAR(50) + trigger_category TEXT, + timestamp DATETIME NOT NULL, + FOREIGN KEY (prompt_id) REFERENCES prompts(id) + ); + """ + ) + # Settings table + op.execute( + """ + CREATE TABLE settings ( + id TEXT PRIMARY KEY, -- UUID stored as TEXT + ip TEXT, -- VARCHAR(45) + port INTEGER, + llm_model TEXT, -- VARCHAR(255) + system_prompt TEXT, + other_settings TEXT -- JSON stored as TEXT + ); + """ + ) + # Create indexes for foreign keys and frequently queried columns + op.execute("CREATE INDEX idx_outputs_prompt_id ON outputs(prompt_id);") + op.execute("CREATE INDEX idx_alerts_prompt_id ON alerts(prompt_id);") + op.execute("CREATE INDEX idx_prompts_timestamp ON prompts(timestamp);") + op.execute("CREATE INDEX idx_outputs_timestamp ON outputs(timestamp);") + op.execute("CREATE INDEX idx_alerts_timestamp ON alerts(timestamp);") + + +def downgrade() -> None: + pass diff --git a/poetry.lock b/poetry.lock index abc48757..8186038d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -140,6 +140,25 @@ typing_extensions = ">=4.0" dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"] docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"] +[[package]] +name = "alembic" +version = "1.14.0" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, + {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + [[package]] name = "annotated-types" version = "0.7.0" @@ -585,6 +604,7 @@ files = [ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, + {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, @@ -595,6 +615,7 @@ files = [ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, + {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, @@ -1242,6 +1263,25 @@ dev = ["black (>=23.3.0)", "httpx (>=0.24.1)", "mkdocs (>=1.4.3)", "mkdocs-mater server = ["PyYAML (>=5.1)", "fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", "sse-starlette (>=1.6.1)", "starlette-context (>=0.3.6,<0.4)", "uvicorn (>=0.22.0)"] test = ["fastapi (>=0.100.0)", "httpx (>=0.24.1)", "huggingface-hub (>=0.23.0)", "pydantic-settings (>=2.0.1)", "pytest (>=7.4.0)", "scipy (>=1.10)", "sse-starlette (>=1.6.1)", "starlette-context (>=0.3.6,<0.4)"] +[[package]] +name = "mako" +version = "1.3.8" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.8-py3-none-any.whl", hash = "sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627"}, + {file = "mako-1.3.8.tar.gz", hash = "sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -3056,4 +3096,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.12,<4.0" -content-hash = "68009f4e7fd6e672f2ba98cfb744a6bb4469f6d05b0266b817eef2ed4cf9aa18" +content-hash = "abbc4a3d469d8bc3061e8b2a261a8195da30e3bd57484ed79c69a192edf6a2e7" diff --git a/pyproject.toml b/pyproject.toml index b520c9f5..f1082cc5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,6 +27,7 @@ tree-sitter-javascript = ">=0.23.1" tree-sitter-python = ">=0.23.6" tree-sitter-rust = ">=0.23.2" sqlite-vec-sl-tmp = "^0.0.4" +alembic = ">=1.14.0" [tool.poetry.group.dev.dependencies] pytest = ">=7.4.0" diff --git a/sql/schema/schema.sql b/sql/schema/schema.sql deleted file mode 100644 index 90e23d0d..00000000 --- a/sql/schema/schema.sql +++ /dev/null @@ -1,48 +0,0 @@ --- Schema for codegate database using SQLite - --- Prompts table -CREATE TABLE prompts ( - id TEXT PRIMARY KEY, -- UUID stored as TEXT - timestamp DATETIME NOT NULL, - provider TEXT, -- VARCHAR(255) - request TEXT NOT NULL, -- Record the full request that arrived to the server - type TEXT NOT NULL -- VARCHAR(50) (e.g. "fim", "chat") -); - --- Outputs table -CREATE TABLE outputs ( - id TEXT PRIMARY KEY, -- UUID stored as TEXT - prompt_id TEXT NOT NULL, - timestamp DATETIME NOT NULL, - output TEXT NOT NULL, -- Record the full response. If it was stream will be a list of objects. 
- FOREIGN KEY (prompt_id) REFERENCES prompts(id) -); - --- Alerts table -CREATE TABLE alerts ( - id TEXT PRIMARY KEY, -- UUID stored as TEXT - prompt_id TEXT NOT NULL, - code_snippet TEXT, -- We check in code that not both code_snippet and trigger_string are NULL - trigger_string TEXT, -- VARCHAR(255) - trigger_type TEXT NOT NULL, -- VARCHAR(50) - trigger_category TEXT, - timestamp DATETIME NOT NULL, - FOREIGN KEY (prompt_id) REFERENCES prompts(id) -); - --- Settings table -CREATE TABLE settings ( - id TEXT PRIMARY KEY, -- UUID stored as TEXT - ip TEXT, -- VARCHAR(45) - port INTEGER, - llm_model TEXT, -- VARCHAR(255) - system_prompt TEXT, - other_settings TEXT -- JSON stored as TEXT -); - --- Create indexes for foreign keys and frequently queried columns -CREATE INDEX idx_outputs_prompt_id ON outputs(prompt_id); -CREATE INDEX idx_alerts_prompt_id ON alerts(prompt_id); -CREATE INDEX idx_prompts_timestamp ON prompts(timestamp); -CREATE INDEX idx_outputs_timestamp ON outputs(timestamp); -CREATE INDEX idx_alerts_timestamp ON alerts(timestamp); diff --git a/src/codegate/db/connection.py b/src/codegate/db/connection.py index 443ab008..4894ad2a 100644 --- a/src/codegate/db/connection.py +++ b/src/codegate/db/connection.py @@ -4,8 +4,11 @@ from typing import List, Optional, Type import structlog +from alembic import command as alembic_command +from alembic.config import Config as AlembicConfig from pydantic import BaseModel from sqlalchemy import TextClause, text +from sqlalchemy.exc import OperationalError from sqlalchemy.ext.asyncio import create_async_engine from codegate.db.fim_cache import FimCache @@ -34,7 +37,7 @@ def __init__(self, sqlite_path: Optional[str] = None): ) # type: ignore self._db_path = Path(sqlite_path).absolute() # type: ignore self._db_path.parent.mkdir(parents=True, exist_ok=True) - logger.debug(f"Initializing DB from path: {self._db_path}") + logger.debug(f"Connecting to DB from path: {self._db_path}") engine_dict = { "url": 
f"sqlite+aiosqlite:///{self._db_path}", "echo": False, # Set to False in production @@ -51,38 +54,6 @@ class DbRecorder(DbCodeGate): def __init__(self, sqlite_path: Optional[str] = None): super().__init__(sqlite_path) - if not self.does_db_exist(): - logger.info(f"Database does not exist at {self._db_path}. Creating..") - asyncio.run(self.init_db()) - - async def init_db(self): - """Initialize the database with the schema.""" - if self.does_db_exist(): - logger.info("Database already exists. Skipping initialization.") - return - - # Get the absolute path to the schema file - current_dir = Path(__file__).parent - schema_path = current_dir.parent.parent.parent / "sql" / "schema" / "schema.sql" - - if not schema_path.exists(): - raise FileNotFoundError(f"Schema file not found at {schema_path}") - - # Read the schema - with open(schema_path, "r") as f: - schema = f.read() - - try: - # Execute the schema - async with self._async_db_engine.begin() as conn: - # Split the schema into individual statements and execute each one - statements = [stmt.strip() for stmt in schema.split(";") if stmt.strip()] - for statement in statements: - # Use SQLAlchemy text() to create executable SQL statements - await conn.execute(text(statement)) - finally: - await self._async_db_engine.dispose() - async def _execute_update_pydantic_model( self, model: BaseModel, sql_command: TextClause ) -> Optional[BaseModel]: @@ -318,8 +289,22 @@ async def get_alerts_with_prompt_and_output(self) -> List[GetAlertsWithPromptAnd def init_db_sync(db_path: Optional[str] = None): """DB will be initialized in the constructor in case it doesn't exist.""" - db = DbRecorder(db_path) - asyncio.run(db.init_db()) + current_dir = Path(__file__).parent + alembic_ini_path = current_dir.parent.parent.parent / "alembic.ini" + alembic_cfg = AlembicConfig(alembic_ini_path) + # Only set the db path if it's provided. 
Otherwise use the one in alembic.ini + if db_path: + alembic_cfg.set_main_option("sqlalchemy.url", f"sqlite:///{db_path}") + + try: + alembic_command.upgrade(alembic_cfg, "head") + except OperationalError: + # An OperationalError is expected if the DB already exists, i.e. it was created before + # migrations were introduced. In this case, we need to stamp the DB with the initial + # revision and then upgrade it to the latest revision. + alembic_command.stamp(alembic_cfg, "30d0144e1a50") + alembic_command.upgrade(alembic_cfg, "head") + logger.info("DB initialized successfully.") if __name__ == "__main__": From 34907a9e48466fccd2b0b693b7fad29472e24eb0 Mon Sep 17 00:00:00 2001 From: Alejandro Ponce Date: Wed, 15 Jan 2025 14:32:06 +0200 Subject: [PATCH 11/11] Remove unuseful comments from alembic.ini --- alembic.ini | 75 ++++------------------------------------------- migrations/env.py | 5 ---- 2 files changed, 6 insertions(+), 74 deletions(-) diff --git a/alembic.ini b/alembic.ini index 5da2bb31..68d4eab1 100644 --- a/alembic.ini +++ b/alembic.ini @@ -1,82 +1,19 @@ -# A generic, single database configuration. +# Database configuration. +# See the full list of options at: +# https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file [alembic] # path to migration scripts # Use forward slashes (/) also on windows to provide an os agnostic path script_location = migrations -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file -# for all available tokens -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - # sys.path path, will be prepended to sys.path if present. # defaults to the current working directory. prepend_sys_path = . 
-# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python>=3.9 or backports.zoneinfo library. -# Any required deps can installed by adding `alembic[tz]` to the pip requirements -# string value is passed to ZoneInfo() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to migrations/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions - -# version path separator; As mentioned above, this is the character used to split +# version path separator; is the character used to split # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -# version_path_separator = newline -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
- -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 +version_path_separator = os # Use os.pathsep. +# DB connection string sqlalchemy.url = sqlite:///codegate_volume/db/codegate.db - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME diff --git a/migrations/env.py b/migrations/env.py index dd2e9d0f..0729ead0 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -5,11 +5,6 @@ # access to the values within the .ini file in use. config = context.config -# Interpret the config file for Python logging. -# This line sets up loggers basically. -# if config.config_file_name is not None: -# fileConfig(config.config_file_name) - # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel