diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml new file mode 100644 index 0000000..691ff77 --- /dev/null +++ b/.github/workflows/integration_tests.yml @@ -0,0 +1,10 @@ +name: Run integration tests +on: + pull_request: + +jobs: + integration_tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: cd integration_tests && make dctest diff --git a/docker-compose.yml b/docker-compose.yml index a92822e..39cabee 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,15 +1,22 @@ services: db: - image: postgres:17 + hostname: db + image: postgres:18 restart: always environment: POSTGRES_DB: elekto POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - - pgdata:/var/lib/postgresql/data + - pgdata:/var/lib/postgresql ports: - '5432:5432' + healthcheck: + test: [ "CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}" ] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s volumes: pgdata: diff --git a/elekto/constants.py b/elekto/constants.py index b6bdc14..800bbce 100644 --- a/elekto/constants.py +++ b/elekto/constants.py @@ -13,16 +13,23 @@ # limitations under the License. # # Author(s): Manish Sahani +import os # Application's CSRF Security CSRF_STATE = 'state' AUTH_STATE = 'authentication' -# Github Endpoints -GITHUB_AUTHORIZE = 'https://github.com/login/oauth/authorize' -GITHUB_ACCESS = 'https://github.com/login/oauth/access_token' -GITHUB_PROFILE = 'https://api.github.com/user' +# GitHub Endpoints +# TODO: Make the GitHub externally configurable/overridable. 
If not specified in env, use github.com +github_host = 'https://github.com' +# github_host = 'http://localhost:9000' +if os.environ.get('INTEGRATION_TEST') == 'true': + github_host = 'http://github:9000' + +GITHUB_AUTHORIZE = f'{github_host}/login/oauth/authorize' +GITHUB_ACCESS = f'{github_host}/login/oauth/access_token' +GITHUB_PROFILE = f'{github_host}/user' # Election attributes related constants ELEC_STAT_COMPLETED = 'completed' diff --git a/entrypoint.sh b/entrypoint.sh index d660b51..1464b63 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -12,7 +12,7 @@ if [ "$APP_DEBUG" == "True" ]; then echo "in Debug mode" ./console --run else - if [ $APP_CONNECT == "socket" ]; then + if [ "$APP_CONNECT" == "socket" ]; then # socket mode for fronting by nginx echo "with a socket connection on $APP_PORT" uwsgi --module elekto:APP --processes 8 --socket :$APP_PORT diff --git a/integration_tests/Dockerfile b/integration_tests/Dockerfile new file mode 100644 index 0000000..b00796b --- /dev/null +++ b/integration_tests/Dockerfile @@ -0,0 +1,12 @@ +FROM python:3.11 + +WORKDIR /app + +COPY requirements.txt . 
+RUN pip install -r requirements.txt
+RUN playwright install
+RUN playwright install-deps
+
+COPY tests tests
+
+CMD ["pytest", "tests"]
diff --git a/integration_tests/Makefile b/integration_tests/Makefile
new file mode 100644
index 0000000..110554e
--- /dev/null
+++ b/integration_tests/Makefile
@@ -0,0 +1,19 @@
+PYTEST:=./venv/bin/pytest
+PIP:=./venv/bin/pip
+
+.PHONY: clean
+clean:
+	rm -rf ./venv
+
+.PHONY: venv
+venv: clean
+	python3.11 -m venv venv
+	$(PIP) install -r requirements.txt
+
+.PHONY: test
+test:
+	$(PYTEST)
+
+.PHONY: dctest
+dctest:
+	docker compose --profile test up --abort-on-container-exit --exit-code-from integration_tests
diff --git a/integration_tests/README.md b/integration_tests/README.md
new file mode 100644
index 0000000..0d483ea
--- /dev/null
+++ b/integration_tests/README.md
@@ -0,0 +1,30 @@
+# Elekto integration tests
+This directory contains the integration tests for Elekto. These tests focus on the integration with GitHub and the way
+GitHub usernames flow through the Elekto application.
+
+## Running tests
+The setup is not bootstrapped (yet), so various manual steps are required to run the required local infra. Then the
+integration tests can be run.
+
+### Infra
+These tests require the following to be running:
+- elekto
+  - Change the GitHub endpoints in `elekto/constants.py` to:
+    - GITHUB_AUTHORIZE = 'http://localhost:9000/login/oauth/authorize'
+    - GITHUB_ACCESS = 'http://localhost:9000/login/oauth/access_token'
+    - GITHUB_PROFILE = 'http://localhost:9000/user'
+  - Start with `python console --run` (in the Elekto project).
+  - Optionally also do `docker compose up` if you want to use the Postgres database it provides.
+- github-static-mock
+  - https://github.com/oduludo/github-oauth-mock
+  - Start the required Redis server with `docker compose up` in the github-oauth-mock project.
+  - Install dependencies with `poetry install`.
+  - Start the mock server with `poetry run start`.
+
+### Tests
+Tests can be run from the `elekto/integration_tests` directory. The test runner is Pytest, headless browser testing is done
+using Playwright. A virtual environment is required to run the tests. Tests assume all infra runs at the default ports.
+- Create the virtual env with `make venv`. This will also install dependencies.
+- Run tests with `make test`.
+
+To run tests entirely in a Docker Compose setup, use `make dctest`.
diff --git a/integration_tests/docker-compose.yaml b/integration_tests/docker-compose.yaml
new file mode 100644
index 0000000..621278b
--- /dev/null
+++ b/integration_tests/docker-compose.yaml
@@ -0,0 +1,51 @@
+include:
+  - ../docker-compose.yml # Defines the `db` service
+  - ./services/github-static-mock/docker-compose.yaml # Defines the `github` mock backend service
+
+services:
+  integration_tests:
+    build: .
+    environment:
+      ELEKTO_HOST: "elekto:8000"
+      GITHUB_HOST: "github:9000"
+    depends_on:
+      elekto:
+        condition: service_healthy
+      db:
+        condition: service_healthy
+    profiles:
+      - test
+    volumes:
+      - "./tests:/app/tests"
+  elekto:
+    build: ../
+    user: "root" # To access /app/meta/.git/FETCH_HEAD
+    ports:
+      # Links will point to `http://elekto:8000/`, so you must manually patch the URLs to localhost in your browser
+      # after each redirect. Exposing the Elekto service allows for manual checking during tests if desired.
+ - "8000:8000" + environment: + DB_CONNECTION: postgresql + DB_HOST: db + DB_PORT: 5432 + DB_PASSWORD: root + DB_DATABASE: elekto + INTEGRATION_TEST: true + APP_PORT: 8000 + META_REPO: https://github.com/elekto-io/elekto.meta.test.git + ELECTION_DIR: elections + META_DEPLOYMENT: local + META_PATH: meta + META_BRANCH: main + META_SECRET: xxx + healthcheck: + test: [ "CMD", "curl", "-f", "http://0.0.0.0:8000/app/" ] + interval: 10s + retries: 5 + start_period: 5s + timeout: 10s + depends_on: + db: + condition: service_healthy + profiles: + - test diff --git a/integration_tests/requirements.txt b/integration_tests/requirements.txt new file mode 100644 index 0000000..d5efb05 --- /dev/null +++ b/integration_tests/requirements.txt @@ -0,0 +1,28 @@ +attrs==25.4.0 +certifi==2025.10.5 +charset-normalizer==3.4.3 +greenlet==3.2.4 +h11==0.16.0 +idna==3.10 +iniconfig==2.1.0 +outcome==1.3.0.post0 +packaging==25.0 +playwright==1.55.0 +pluggy==1.6.0 +pyee==13.0.0 +Pygments==2.19.2 +PySocks==1.7.1 +pytest==8.4.2 +pytest-base-url==2.1.0 +pytest-playwright==0.7.1 +python-slugify==8.0.4 +requests==2.32.5 +sniffio==1.3.1 +sortedcontainers==2.4.0 +text-unidecode==1.3 +trio==0.31.0 +trio-websocket==0.12.2 +typing_extensions==4.15.0 +urllib3==2.5.0 +websocket-client==1.9.0 +wsproto==1.2.0 diff --git a/integration_tests/services/github-static-mock/Dockerfile b/integration_tests/services/github-static-mock/Dockerfile new file mode 100644 index 0000000..75eb16c --- /dev/null +++ b/integration_tests/services/github-static-mock/Dockerfile @@ -0,0 +1,13 @@ +FROM python:3.13 + +WORKDIR /app + +# Install Poetry in a place that's already in $PATH +ENV POETRY_HOME=/usr/local +RUN curl -sSL https://install.python-poetry.org | python3 - + +COPY . . 
+RUN poetry install + +EXPOSE 9000 +CMD ["poetry", "run", "start"] diff --git a/integration_tests/services/github-static-mock/README.md b/integration_tests/services/github-static-mock/README.md new file mode 100644 index 0000000..c958112 --- /dev/null +++ b/integration_tests/services/github-static-mock/README.md @@ -0,0 +1,55 @@ +# Static oauth GitHub mock +This project mocks GitHub oauth server's [web application flow](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps#web-application-flow). +As this is a static project, codes and tokens are hardcoded. An endpoint is available to set the user data to be +returned on the next call to the `/user` endpoint. By doing this, a webpage with form input on the mocked user is +avoided, keeping this simple for automated tests. + +WARNING: This is a project for testing purposes only. Nothing about this implementation is secure. + +## Running the project +Currently only the Redis cache runs on Docker Compose. This stores the upcoming user's data. Run it with +`docker compose up`. + +The web application itself can be run with `poetry run start` after running `poetry install`. + +## Endpoints +### Authorization mechanics +The oauth endpoints mocked facilitate the web application flow. Calls in order are: +1. Call `/system/upcoming-user` to set the next mocked user data. +2. Call `/login/oauth/authorize` to have the application return a response with a code and state. +3. Call `/login/oauth/access_token` to mock exchanging the code for a bearer token. +4. Call `/user` to obtain user data. + +## /system/upcoming-user +Example call (using [HTTPie](https://httpie.io/)): + +```shell +http "http://localhost:9000/system/upcoming-user" name="John Smith" login=jsmith +``` + +## /login/oauth/authorize +Authorization requires query parameters: response_type, client_id, scope, state and redirect_uri. The redirect_uri is +used to redirect the client. 
The state is included in the redirect URI for further use by the client. The code in the
+redirect URI is hardcoded.
+
+```shell
+http "http://localhost:9000/login/oauth/authorize?response_type=code&client_id=Ov23liuEhYT3CT9Yh6VA&scope=user%3Alogin%2Cname&state=JQOy3kw1PDiQh662ln4DuTGX20ajwb&redirect_uri=http%3A%2F%2Flocalhost%3A8000%2Foauth%2Fgithub%2Fcallback"
+```
+
+## /login/oauth/access_token
+This endpoint takes an authorization code and returns a bearer token. The bearer token is hardcoded. This endpoint requires
+application/x-www-form-urlencoded data.
+
+```shell
+http --form POST "http://localhost:9000/login/oauth/access_token" grant_type=authorization_code code=SplxlOBeZQQYbYS6WxSbIA
+```
+
+## /user
+Finally, the user endpoint is called. This returns the user's profile data. The `login` and `name` in the response are
+retrieved from Redis and are the values set via the `/system/upcoming-user` endpoint. This endpoint requires an
+Authorization header containing `Bearer `; the token itself is not evaluated. Calling this endpoint without setting an
+upcoming user in cache results in an HTTP 400 Bad Request.
+
+```shell
+http "http://localhost:9000/user" "Authorization: Bearer gho_xxx"
+```
diff --git a/integration_tests/services/github-static-mock/docker-compose.yaml b/integration_tests/services/github-static-mock/docker-compose.yaml
new file mode 100644
index 0000000..807bdcc
--- /dev/null
+++ b/integration_tests/services/github-static-mock/docker-compose.yaml
@@ -0,0 +1,18 @@
+services:
+  cache:
+    image: redis
+    restart: always
+    ports:
+      - '6379:6379'
+    profiles:
+      - test
+  github:
+    build: .
+ ports: + - '9000:9000' + environment: + CACHE_HOST: 'cache' + ELEKTO_HOST: 'elekto' + ELEKTO_PORT: '8000' + profiles: + - test diff --git a/integration_tests/services/github-static-mock/poetry.lock b/integration_tests/services/github-static-mock/poetry.lock new file mode 100644 index 0000000..46cc4a0 --- /dev/null +++ b/integration_tests/services/github-static-mock/poetry.lock @@ -0,0 +1,353 @@ +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.10.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1"}, + {file = "anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "click" +version = "8.2.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, +] + +[package.dependencies] +colorama = 
{version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "fastapi" +version = "0.116.1" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565"}, + {file = "fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.40.0,<0.48.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0)", "jinja2 
(>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "pydantic" +version = "2.11.7" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + 
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = 
"pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash 
= "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = 
"pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "python-multipart" +version = "0.0.20" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, + {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, +] + +[[package]] +name = "redis" +version = "6.4.0" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f"}, + {file = "redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010"}, +] + +[package.extras] +hiredis = ["hiredis (>=3.2.0)"] +jwt = ["pyjwt (>=2.9.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (>=20.0.1)", "requests (>=2.31.0)"] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "starlette" +version = "0.47.2" 
+description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b"}, + {file = "starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, + {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "uvicorn" +version = "0.35.0" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a"}, + {file = "uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.13" +content-hash = "02fee0b0bed394a34e37a88f628a69a4a18f746f6db50b32964bed0c385d9064" diff --git a/integration_tests/services/github-static-mock/pyproject.toml b/integration_tests/services/github-static-mock/pyproject.toml new file mode 100644 index 0000000..c88b126 --- /dev/null +++ b/integration_tests/services/github-static-mock/pyproject.toml @@ -0,0 +1,25 @@ +[project] +name = "github-static-mock" +version = "0.1.0" +description = "" +authors = [ + {name = "Your Name",email = "you@example.com"} +] +readme = "README.md" +requires-python = ">=3.13" +dependencies = [ + "fastapi (>=0.116.1,<0.117.0)", + "uvicorn (>=0.35.0,<0.36.0)", + "python-multipart (>=0.0.20,<0.0.21)", + "redis (>=6.4.0,<7.0.0)" +] + +[tool.poetry] +packages = [{include = "github_static_mock", from = "src"}] + +[tool.poetry.scripts] +start = "github_static_mock.app:start" + +[build-system] +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/integration_tests/services/github-static-mock/src/github_static_mock/__init__.py b/integration_tests/services/github-static-mock/src/github_static_mock/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/integration_tests/services/github-static-mock/src/github_static_mock/app.py b/integration_tests/services/github-static-mock/src/github_static_mock/app.py new file mode 100644 index 0000000..47f3d63 --- /dev/null +++ b/integration_tests/services/github-static-mock/src/github_static_mock/app.py @@ -0,0 +1,142 @@ +import json +import logging +import os + +from typing import Annotated + +from redis.asyncio import Redis + +from fastapi import FastAPI, Form, Response, Request, Query, status, HTTPException +import uvicorn + +from github_static_mock.exceptions import NoUpcomingUserException +from github_static_mock.models import AuthorizeQueryParams, AccessTokenResponse, UserResponse, UpcomingUser + +logger = logging.getLogger(__name__) +app = FastAPI() + +CACHE_HOST = os.environ.get("CACHE_HOST", "localhost") + + +async def get_upcoming_user() -> UpcomingUser: + cache = Redis(host=CACHE_HOST, port=6379, db=0) + raw_data = await cache.get('upcoming_user') + + if raw_data is None: + raise NoUpcomingUserException() + + return UpcomingUser(**json.loads(raw_data.decode('utf-8'))) + + +@app.get('/login/oauth/authorize') +def authorize(query: Annotated[AuthorizeQueryParams, Query()]) -> Response: + """ + Authorize application. + + http://localhost:9000/login/oauth/authorize?response_type=code&client_id=Ov23liuEhYT3CT9Yh6VA&scope=user%3Alogin%2Cname&state=JQOy3kw1PDiQh662ln4DuTGX20ajwb&redirect_uri=http%3A%2F%2Flocalhost%3A8000%2Foauth%2Fgithub%2Fcallback + + :param request: + :param query: + :return: + """ + logger.info(f'Authorize request: {query.model_dump_json()}') + print(query.redirect_uri) + + return Response( + status_code=status.HTTP_302_FOUND, + headers={ + 'Location': f'{query.redirect_uri}?code=SplxlOBeZQQYbYS6WxSbIA&state={query.state}' + }, + ) + + +@app.post('/login/oauth/access_token') +async def access_token(grant_type: Annotated[str, Form()], code: Annotated[str, Form()]) -> AccessTokenResponse: + """ + Exchange code for access token. 
+ + NOTE: This endpoint receives form data (application/x-www-form-urlencoded), not JSON. This is the standard for the + GitHub API. While GitHub also supports application/json and application/xml, this mock does not. + + As this endpoint implementation is static, the grant_type and code are ignored. The access token is specific to the + mocked user (using the login name) to prevent the client application perceiving all requests as coming from the same + user. + + :return: + """ + upcoming_user = await get_upcoming_user() + + return AccessTokenResponse( + access_token=f'gho_myososecretbearertoken_{upcoming_user.login}', + token_type='bearer', + scope='' + ) + + +@app.get('/user') +async def user(request: Request) -> UserResponse: + if 'Bearer ' not in request.headers.get('Authorization', ''): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + ) + + upcoming_user = await get_upcoming_user() + + return UserResponse(**{ + 'login': upcoming_user.login, + 'id': 14993302, + 'node_id': 'MDQ6VXNlcjE0OTkzMzAy', + 'avatar_url': 'https://avatars.githubusercontent.com/u/14993302?v=4', + 'gravatar_id': '', + 'url': 'https://api.github.com/users/oduludo', + 'html_url': 'https://github.com/oduludo', + 'followers_url': 'https://api.github.com/users/oduludo/followers', + 'following_url': 'https://api.github.com/users/oduludo/following{/other_user}', + 'gists_url': 'https://api.github.com/users/oduludo/gists{/gist_id}', + 'starred_url': 'https://api.github.com/users/oduludo/starred{/owner}{/repo}', + 'subscriptions_url': 'https://api.github.com/users/oduludo/subscriptions', + 'organizations_url': 'https://api.github.com/users/oduludo/orgs', + 'repos_url': 'https://api.github.com/users/oduludo/repos', + 'events_url': 'https://api.github.com/users/oduludo/events{/privacy}', + 'received_events_url': 'https://api.github.com/users/oduludo/received_events', + 'type': 'User', + 'user_view_type': 'public', + 'site_admin': False, + 'name': upcoming_user.name, + 'company': 
'Monsters, Inc.', + 'blog': '', + 'location': 'The Netherlands', + 'email': None, + 'hireable': None, + 'bio': None, + 'twitter_username': None, + 'notification_email': None, + 'public_repos': 6, + 'public_gists': 3, + 'followers': 7, + 'following': 5, + 'created_at': '2015-10-06T08:40:53Z', + 'updated_at': '2025-08-06T15:43:33Z' + }) + + +@app.post('/system/upcoming-user') +async def store_upcoming_user(request: Request, data: UpcomingUser) -> Response: + """ + Set the upcoming user's data to be returned from the next call to the /user endpoint. + + :param request: + :param data: + :return: + """ + cache = Redis(host=CACHE_HOST, port=6379, db=0) + await cache.set('upcoming_user', data.model_dump_json()) + return Response(status_code=status.HTTP_201_CREATED) + + +def start() -> None: + """Launched with `poetry run start` at root level""" + uvicorn.run('github_static_mock.app:app', host='0.0.0.0', port=9000, reload=True) + +if __name__ == '__main__': + start() diff --git a/integration_tests/services/github-static-mock/src/github_static_mock/exceptions.py b/integration_tests/services/github-static-mock/src/github_static_mock/exceptions.py new file mode 100644 index 0000000..3dbf07a --- /dev/null +++ b/integration_tests/services/github-static-mock/src/github_static_mock/exceptions.py @@ -0,0 +1,10 @@ +from fastapi.exceptions import HTTPException +from starlette import status + + +class NoUpcomingUserException(HTTPException): + def __init__(self): + super().__init__( + status_code=status.HTTP_400_BAD_REQUEST, + detail='No upcoming user configured.' 
+ ) diff --git a/integration_tests/services/github-static-mock/src/github_static_mock/models.py b/integration_tests/services/github-static-mock/src/github_static_mock/models.py new file mode 100644 index 0000000..0597614 --- /dev/null +++ b/integration_tests/services/github-static-mock/src/github_static_mock/models.py @@ -0,0 +1,63 @@ +import os +from typing import Literal + +from pydantic import BaseModel, HttpUrl + +ELEKTO_HOST = os.environ.get('ELEKTO_HOST', 'localhost') +ELEKTO_PORT = os.environ.get('ELEKTO_PORT', '8000') + + +class AuthorizeQueryParams(BaseModel): + client_id: str + response_type: Literal['code'] + scope: str + state: str + redirect_uri: HttpUrl = f'http://{ELEKTO_HOST}:{ELEKTO_PORT}/oauth/github/callback' + + +class AccessTokenResponse(BaseModel): + access_token: str + token_type: str + scope: str + + +class UserResponse(BaseModel): + login: str + id: int + node_id: str + avatar_url: str + gravatar_id: str + url: str + html_url: str + followers_url: str + following_url: str + gists_url: str + starred_url: str + subscriptions_url: str + organizations_url: str + repos_url: str + events_url: str + received_events_url: str + type: str + user_view_type: str + site_admin: bool + name: str + company: str + blog: str + location: str + email: str | None + hireable: bool | None + bio: str | None + twitter_username: str | None + notification_email: str | None + public_repos: int + public_gists: int + followers: int + following: int + created_at: str + updated_at: str + + +class UpcomingUser(BaseModel): + name: str + login: str diff --git a/integration_tests/services/github-static-mock/tests/__init__.py b/integration_tests/services/github-static-mock/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/integration_tests/tests/conftest.py b/integration_tests/tests/conftest.py new file mode 100644 index 0000000..6b9e7c6 --- /dev/null +++ b/integration_tests/tests/conftest.py @@ -0,0 +1,34 @@ +import os +import pytest + +from 
utils.github_mock.client import GithubMockUtilityClient + + +@pytest.fixture +def host() -> str: + return 'http://' + os.environ.get('ELEKTO_HOST', 'localhost:8000') + + +@pytest.fixture +def login_url(host: str) -> str: + return f'{host}/login' + + +@pytest.fixture +def logout_url(host: str) -> str: + return f'{host}/logout' + + +@pytest.fixture +def app_url(host: str) -> str: + return f'{host}/app' + + +@pytest.fixture +def github_mock_host() -> str: + return 'http://' + os.environ.get('GITHUB_HOST', 'localhost:9000') + + +@pytest.fixture +def github_mock_utility(github_mock_host: str) -> GithubMockUtilityClient: + return GithubMockUtilityClient(host=github_mock_host) diff --git a/integration_tests/tests/test_login.py b/integration_tests/tests/test_login.py new file mode 100644 index 0000000..b7a2d56 --- /dev/null +++ b/integration_tests/tests/test_login.py @@ -0,0 +1,43 @@ +import os + +import pytest +from playwright.sync_api import Page, expect + +from utils.github_mock import GithubMockUtilityClient, User + + +ELEKTO_HOST = os.environ.get('ELEKTO_HOST', 'localhost:8000') + + +def logout(page: Page) -> None: + page.goto(f'http://{ELEKTO_HOST}/app') + logout_link = page.get_by_role('link', name='Logout') + if logout_link.is_visible(): + logout_link.click() + + +@pytest.mark.parametrize( + 'user', + [ + User(name='Jack', login='jack'), + User(name='Jill', login='jill'), + ] +) +def test_login(page: Page, login_url: str, logout_url: str, github_mock_utility: GithubMockUtilityClient, user: User) -> None: + """ + Test that login is working as expected. + + Multiple users should be able to log in and the dashboard should change content based on the authenticated user. 
+ """ + logout(page) # Ensure we start with fresh state + + github_mock_utility.store_upcoming_user(user=user) + + page.goto(login_url) + + expect(page.get_by_text('Sign in with Github')).to_be_visible() + + page.get_by_role('button', name='Sign in with Github').click() + expect(page).to_have_title('Dashboard | Elekto') + expect(page.get_by_text(f'Welcome! {user.name}')).to_be_visible() + logout(page) diff --git a/integration_tests/tests/utils/__init__.py b/integration_tests/tests/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/integration_tests/tests/utils/github_mock/__init__.py b/integration_tests/tests/utils/github_mock/__init__.py new file mode 100644 index 0000000..55ab167 --- /dev/null +++ b/integration_tests/tests/utils/github_mock/__init__.py @@ -0,0 +1,4 @@ +from .client import GithubMockUtilityClient +from .models import User + +__all__ = ['GithubMockUtilityClient', 'User'] diff --git a/integration_tests/tests/utils/github_mock/client.py b/integration_tests/tests/utils/github_mock/client.py new file mode 100644 index 0000000..93d9c96 --- /dev/null +++ b/integration_tests/tests/utils/github_mock/client.py @@ -0,0 +1,28 @@ +import requests + +from .models import User + + +class GithubMockUtilityClient: + def __init__(self, host: str): + self.host = host + + def store_upcoming_user(self, user: User) -> None: + """ + Set user data for the next mocked Github login. + + IRL users would log in at Github and then return to Elekto with a code. Elekto uses the code to get data for + that authenticated user. Our tests mock the Github part, causing the code to not point to a particular user. To + make up for this, we can set fake user data in the mock server. The next user lookup populates the 'name' and + 'login' fields with the fake data we sent in using the /system/upcoming-user call. + + Args: + user: The User object to set mock data from. 
+ + Returns: None + + """ + resp = requests.post(f'{self.host}/system/upcoming-user', json=user.to_dict()) + + if resp.status_code != 201: + raise Exception(resp.text) diff --git a/integration_tests/tests/utils/github_mock/models.py b/integration_tests/tests/utils/github_mock/models.py new file mode 100644 index 0000000..d54c1ea --- /dev/null +++ b/integration_tests/tests/utils/github_mock/models.py @@ -0,0 +1,10 @@ +class User: + def __init__(self, name: str, login: str): + self.name = name + self.login = login + + def to_dict(self) -> dict: + return { + 'login': self.login, + 'name': self.name, + }