braincube-aws-core

Microframework for Python AWS Lambdas.


Installation

pip install braincube-aws-core

Built With

  • asyncpg - A fast PostgreSQL Database Client Library for Python/asyncio.
  • pydantic - Data validation using Python type hints.
  • pypika - Python Query Builder.

Application Controllers Example

import asyncio

from uuid import uuid4
from http import HTTPStatus

from core.app.data import HTTPRequest, HTTPResponse
from core.app.app_module import AppModule
from core.app.app_controller import AppController

from pydantic import BaseModel


class AccountDto(BaseModel):
    iban: str
    bban: str


data = {
    "a0a412d9-87ef-474b-9ac8-b682ec5e0fb3": AccountDto(iban="EUR27100777770209299700", bban="EURC12345612345678"),
    "5ebc25bd-e152-4a70-b251-d68e43be581e": AccountDto(iban="GR27100777770209299700", bban="GRC12345612345678"),
}

app = AppController("/accounts")


@app.get("/{id}")
async def get_account(request: HTTPRequest) -> HTTPResponse:
    account = data.get(request.path_params["id"])
    return HTTPResponse(HTTPStatus.OK if account else HTTPStatus.NO_CONTENT, account)


@app.post()
async def create_account(request: HTTPRequest[AccountDto]) -> HTTPResponse:
    data[str(uuid4())] = request.body
    return HTTPResponse(HTTPStatus.CREATED)


# reuse a single event loop across warm Lambda invocations
loop = asyncio.get_event_loop()

# register controllers with the application module
module = AppModule([app])


# Lambda entry point
def main(event, context):
    return loop.run_until_complete(module.serve(event, context))
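
For a quick local smoke test you can call the handler directly with a hand-built event. The exact event shape depends on your trigger; the sketch below assumes an API Gateway proxy-style event, and the field values shown are illustrative, not part of this library:

if __name__ == "__main__":
    # minimal API Gateway proxy-style event (illustrative only)
    sample_event = {
        "httpMethod": "GET",
        "path": "/accounts/a0a412d9-87ef-474b-9ac8-b682ec5e0fb3",
        "pathParameters": {"id": "a0a412d9-87ef-474b-9ac8-b682ec5e0fb3"},
    }
    # print the serialized HTTP response produced by the module
    print(main(sample_event, None))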

Dependency Injection Example

from core.di.injector import inject
from core.dal.postgres_connection import get_pool, Pool


@inject("data_warehouse_pool")
async def provide_warehouse_pool() -> Pool:
    return await get_pool()


@inject(qualifier="pool:data_warehouse_pool")
class BankService:

    def __init__(self, pool: Pool):
        self._pool = pool

Postgres Repository Example

from core.app.data import HTTPRequest
from core.utils.data import Order, OrderType
from core.dal.data import Key, Schema, Column, Relation, SimpleColumn, JoinType, JoinThrough, StatementField
from core.dal.postgres_connection import get_pool, Pool
from core.dal.postgres_repository import PostgresRepository

# schema definition
equities = Schema(
    table="equities",
    alias="e",
    primary_key=["id"],
    columns=[
        Column("id", updatable=False, insertable=False),
        Column("name"),
        Column("type"),
        Column("issuer_id", alias="issuerId"),
        Column("industry_sector", alias="industrySector"),
        Column("isin"),
        Column("reference"),
        Column("bloomberg_code", alias="bloombergCode"),
        Column("market_symbol", alias="marketSymbol"),
        Column("currency"),
        Column("country", ),
        Column("min_amount", alias="minAmount"),
    ],
    statement_fields=[
        StatementField("isTypeOne", statement="CASE WHEN e.type = 1 THEN TRUE ELSE FALSE END")
    ],
    order=[
        Order(type=OrderType.asc, alias="name")
    ],
    relations=[
        Relation(
            table="parties",
            alias="p",
            columns=[
                SimpleColumn("name"),
                SimpleColumn("short_name", alias="shortName"),
            ],
            join_forced=False,
            join_type=JoinType.left,
            join_through=JoinThrough(from_column_name="issuer_id", to_column_name="id")
        )
    ]
)
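
Note: column aliases (e.g. industrySector) are the field names used in query params and results, and relation columns are addressed with the relation table as a prefix (e.g. parties_name, parties_shortName), as in the query params format below.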


# repository definition
class EquitiesRepo(PostgresRepository):

    def __init__(self, pool: Pool):
        super().__init__(pool, equities)


# repository usage (the awaits below belong inside an async context, e.g. a controller handler)

request = HTTPRequest()

repo = EquitiesRepo(await get_pool())

await repo.find_by_pk(Key(request.path_params["id"]), request.query_params.fields)

await repo.exists_by_pk(Key("9448a57b-f686-4935-b152-566baab712db"))

await repo.find_one(
    request.query_params.fields,
    conditions=request.query_params.conditions,
    order=request.query_params.order)

await repo.find_all(
    request.query_params.fields,
    conditions=request.query_params.conditions,
    order=request.query_params.order)

await repo.find_all_by_pk(
    [
        Key("9448a57b-f686-4935-b152-566baab712db"),
        Key("43c8ec37-9a59-44eb-be90-def391ba2f02")
    ],
    aliases=request.query_params.fields,
    order=request.query_params.order)

await repo.find_many(
    request.query_params.fields,
    conditions=request.query_params.conditions,
    page=request.query_params.page,
    order=request.query_params.order)

await repo.insert({
    "name": "Bursa de Valori Bucuresti SA",
    "type": 1,
    "industrySector": 40,
    "isin": "ROBVBAACNOR0",
    "bloombergCode": "BBG000BBWMC5",
    "marketSymbol": "BVB RO Equity",
    "currency": "RON",
    "country": "RO",
})

await repo.insert_bulk(
    aliases=["name", "type", "industrySector", "isin", "bloombergCode", "marketSymbol", "currency", "country"],
    data=[
        ["Bursa de Valori Bucuresti SA", 1, 40, "ROBVBAACNOR0", "BBG000BBWMC5", "BVB RO Equity", "RON", "RO"],
        ["Citigroup Inc", 1, 40, "US1729674242", "BBG000FY4S11", "C US Equity", "USD", "US"],
        ["Coca-Cola HBC AG", 1, 49, "CH0198251305", "BBG004HJV2T1", "EEE GA Equity", "EUR", "GR"],
    ]
)

await repo.update({
    "type": 1,
    "industrySector": 40,
}, request.query_params.conditions, request.query_params.fields)

await repo.update_by_pk(Key("9448a57b-f686-4935-b152-566baab712db"), {
    "type": 1,
    "industrySector": 40
})

await repo.delete(request.query_params.conditions, ["id", "name", "type"])

await repo.delete_by_pk(Key("9448a57b-f686-4935-b152-566baab712db"), ["id", "name", "type"])

await repo.fetch("SELECT * FROM equities WHERE type = $1 and isin = $2", [1, "TREEGYO00017"])

await repo.fetch_one("SELECT * FROM equities WHERE id = $1", ["2b67122a-f47e-41b1-b7f7-53be5ca381a0"])

await repo.execute("DELETE FROM equities WHERE id = $1", ["2b67122a-f47e-41b1-b7f7-53be5ca381a0"])
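
The fetch, fetch_one and execute helpers run raw SQL directly; since the library is built on asyncpg, query parameters use asyncpg's positional $1, $2, ... placeholders.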

Query params format

fields=name, type, industrySector, isin, bloombergCode, parties_name, parties_shortName
type=1
isin=range(40, 49)
id=any(9448a57b-f686-4935-b152-566baab712db, 43c8ec37-9a59-44eb-be90-def391ba2f02)
page_no=1
page_size=50
top_size=50
order=name, id DESC
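
Combined in a request these might look like the following (hypothetical endpoint for illustration; spaces would be URL-encoded in practice):

GET /equities?fields=name,type,industrySector&type=1&page_no=1&page_size=50&order=name,id DESC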

Local Development Requirements

To build and run the server locally you need the AWS SAM CLI and Docker installed.

Run server locally

# open ssh tunnel
sudo sh ssh_tunnel_Analog_JBox.sh
# apply code changes to the Docker image
sam-api$ sam build
# start server locally on http://127.0.0.1:3000
sam-api$ sam local start-api --warm-containers EAGER
# or run function locally using event.json as parameter
sam-api$ sam local invoke ApiFunction --event events/event.json

Deploy to AWS

sam build --use-container
sam deploy --capabilities CAPABILITY_NAMED_IAM --guided --profile analog_user --region eu-west-1

Build and publish a new package version using twine

python3 -m pip install --upgrade pip
python3 -m pip install --upgrade build
python3 -m pip install --upgrade twine
python3 -m build
twine upload --skip-existing dist/*
