Microframework for Python AWS Lambdas
Project description
braincube-aws-core
Microframework for Python AWS lambdas.
Installation
pip install braincube-aws-core-alpha
Built With
- asyncpg - A fast PostgreSQL Database Client Library for Python/asyncio.
- pydantic - Data validation using Python type hints.
- pypika - Python Query Builder.
Application Controllers Example
import asyncio
from http import HTTPStatus
from core.rest.data import HTTPRequest, HTTPResponse
from core.rest.app_module import AppModule
from core.rest.app_controller import AppController
from pydantic import BaseModel
class AccountDto(BaseModel):
    """Pydantic model describing a bank account payload."""
    id: int    # internal account identifier
    iban: str  # International Bank Account Number
    bban: str  # Basic Bank Account Number
# In-memory sample data for the example handlers below.
# NOTE: pydantic models must be constructed with keyword arguments;
# positional construction (AccountDto(1, "...", "...")) raises a TypeError.
_accounts = [
    AccountDto(id=1, iban="EUR27100777770209299700", bban="EURC12345612345678"),
    AccountDto(id=2, iban="GR27100777770209299700", bban="GRC12345612345678"),
]

# Controller grouping every handler below under the /accounts route prefix.
app = AppController("/accounts")
@app.get("/{id}")
async def get_account(request: HTTPRequest) -> HTTPResponse:
    """Return the account whose id matches the ``id`` path parameter.

    Path parameters arrive as strings, so both sides of the comparison are
    normalized to ``str`` — the original ``int == str`` comparison could
    never match an account.
    """
    requested_id = str(request.path_parameters["id"])
    account = next((a for a in _accounts if str(a.id) == requested_id), None)
    # 200 with the account body when found, otherwise 204 with no content.
    return HTTPResponse(HTTPStatus.OK if account else HTTPStatus.NO_CONTENT, account)
@app.post()
async def create_account(request: HTTPRequest[AccountDto]) -> HTTPResponse:
    # Stub handler: the request body is typed as AccountDto (presumably
    # validated by the framework — confirm against HTTPRequest), but this
    # example only acknowledges the call with 200 OK.
    return HTTPResponse(HTTPStatus.OK)
# Create one event loop per Lambda container and reuse it across
# invocations. asyncio.get_event_loop() is deprecated for implicit loop
# creation since Python 3.10 and raises when no loop is set on the thread.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

module = AppModule([app])


def main(event, context):
    """AWS Lambda entry point: drive the async app module to completion."""
    return loop.run_until_complete(module.serve(event, context))
Dependency Injection Example
from core.di.injector import inject
from core.dal.postgres_connection import get_pool, Pool
@inject("data_warehouse_pool")
async def provide_warehouse_pool() -> Pool:
    # Provider registered under the "data_warehouse_pool" qualifier;
    # creates the shared Postgres connection pool on first use.
    return await get_pool()
@inject(qualifier="pool:data_warehouse_pool")
class BankService:
    # The qualifier "pool:data_warehouse_pool" maps the constructor
    # parameter ``pool`` to the provider registered above.
    def __init__(self, pool: Pool):
        self._pool = pool  # shared Postgres pool used by service methods
Postgres Repository Example
from core.rest.data import HTTPRequest
from core.utils.data import Order, OrderType
from core.dal.data import Key, Schema, Column, Relation, SimpleColumn, JoinType, JoinThrough, StatementField
from core.dal.postgres_connection import get_pool, Pool
from core.dal.postgres_repository import PostgresRepository
# schema definition
# Schema definition for the "equities" table: maps columns to camelCase
# API aliases, declares a computed statement field, and joins the issuer
# from "parties".
equities = Schema(
    table="equities",
    alias="e",
    primary_key=["id"],
    # Default ordering applied when the caller supplies none.
    order=[Order(type=OrderType.asc, alias="name")],
    statement_fields=[
        # Computed boolean exposed as "isTypeOne"; depends on no joined
        # relations, hence the empty relations_aliases.
        StatementField(alias="isTypeOne",
                       statement="CASE WHEN e.type = 1 then True else False END",
                       relations_aliases=[])
    ],
    columns=[
        # Primary key is generated by the database: never written by the app.
        Column(name="id", updatable=False, insertable=False),
        Column(name="name"),
        Column(name="type"),
        Column(name="issuer_id", alias="issuerId"),
        Column(name="industry_sector", alias="industrySector"),
        Column(name="isin"),
        Column(name="reference"),
        Column(name="bloomberg_code", alias="bloombergCode"),
        Column(name="market_symbol", alias="marketSymbol"),
        Column(name="currency"),
        Column(name="country"),  # stray ", )" removed from the original
        Column(name="min_amount", alias="minAmount"),
    ],
    relations=[
        # Issuer lookup: equities.issuer_id -> parties.id, only joined
        # when one of its columns is requested (join_forced=False).
        Relation(
            table="parties",
            alias="p",
            join_forced=False,
            columns=[
                SimpleColumn(name="name"),
                SimpleColumn(name="short_name", alias="shortName"),
            ],
            join_type=JoinType.left,
            join_through=JoinThrough(from_column_name="issuer_id", to_column_name="id")
        )
    ]
)
# repository definition
class EquitiesRepo(PostgresRepository):
    """Repository bound to the ``equities`` schema defined above."""

    def __init__(self, pool: Pool):
        # The schema is fixed; only the connection pool is injected.
        super().__init__(pool, equities)
# repository usage
async def repository_usage_example():
    """Demonstrate every EquitiesRepo operation.

    Wrapped in a coroutine because ``await`` is a SyntaxError at module
    level; run with ``asyncio.run(repository_usage_example())``.
    """
    request = HTTPRequest()
    repo = EquitiesRepo(await get_pool())

    # --- reads ---------------------------------------------------------
    await repo.find_by_pk(key=Key(request.path_parameters["id"]),
                          aliases=request.query_parameters.fields)

    await repo.exists_by_pk(key=Key("9448a57b-f686-4935-b152-566baab712db"))

    await repo.find_one(
        aliases=request.query_parameters.fields,
        conditions=request.query_parameters.conditions,
        order=request.query_parameters.order)

    await repo.find_all(
        aliases=request.query_parameters.fields,
        conditions=request.query_parameters.conditions,
        order=request.query_parameters.order)

    await repo.find_all_by_pk([
        Key("9448a57b-f686-4935-b152-566baab712db"),
        Key("43c8ec37-9a59-44eb-be90-def391ba2f02")],
        aliases=request.query_parameters.fields,
        order=request.query_parameters.order)

    # Paged variant: honours page_no/page_size from the query string.
    await repo.find_many(
        aliases=request.query_parameters.fields,
        conditions=request.query_parameters.conditions,
        page=request.query_parameters.page,
        order=request.query_parameters.order)

    # --- writes --------------------------------------------------------
    await repo.insert(
        data={
            "name": "Bursa de Valori Bucuresti SA",
            "type": 1,
            "industrySector": 40,
            "isin": "ROBVBAACNOR0",
            "bloombergCode": "BBG000BBWMC5",
            "marketSymbol": "BVB RO Equity",
            "currency": "RON",
            "country": "RO",
        })

    await repo.insert_bulk(
        aliases=["name", "type", "industrySector", "isin", "bloombergCode", "marketSymbol", "currency", "country"],
        data=[
            ["Bursa de Valori Bucuresti SA", 1, 40, "ROBVBAACNOR0", "BBG000BBWMC5", "BVB RO Equity", "RON", "RO"],
            ["Citigroup Inc", 1, 40, "US1729674242", "BBG000FY4S11", "C US Equity", "USD", "US"],
            ["Coca-Cola HBC AG", 1, 49, "CH0198251305", "BBG004HJV2T1", "EEE GA Equity", "EUR", "GR"],
        ]
    )

    # ISIN is a string column (see insert above); the original example set
    # it to the integer 40, apparently copy-pasted from industrySector.
    await repo.update(
        data={
            "type": 1,
            "isin": "ROBVBAACNOR0",
        },
        conditions=request.query_parameters.conditions,
        returning_aliases=request.query_parameters.fields)

    await repo.update_by_pk(
        Key("9448a57b-f686-4935-b152-566baab712db"),
        data={
            "type": 1,
            "isin": "ROBVBAACNOR0",
        }, returning_aliases=[])

    # --- deletes -------------------------------------------------------
    await repo.delete(
        conditions=request.query_parameters.conditions,
        returning_aliases=["id", "name", "type"])

    await repo.delete_by_pk(
        Key("9448a57b-f686-4935-b152-566baab712db"),
        returning_aliases=["id", "name", "type"])

    # --- raw SQL escape hatch (parameterized, $1/$2 placeholders) ------
    await repo.fetch("SELECT * FROM equities WHERE type = $1 and isin = $2", [1, "TREEGYO00017"])
    await repo.fetch_one("SELECT * FROM equities WHERE id = $1", ["2b67122a-f47e-41b1-b7f7-53be5ca381a0"])
    await repo.execute("DELETE FROM equities WHERE id = $1", ["2b67122a-f47e-41b1-b7f7-53be5ca381a0"])
Query params format
fields=name, type, industrySector, isin, bloombergCode, parties_name, parties_shortName
type=1
isin=range(40, 49)
id=any(9448a57b-f686-4935-b152-566baab712db, 43c8ec37-9a59-44eb-be90-def391ba2f02)
page_no=1
page_size=50
top_size=50
order=name, id DESC
Local Development Requirements
To develop and run the service locally you need the AWS SAM CLI and Docker installed.
Run server locally
# open SSH tunnel
sudo sh ssh_tunnel_Analog_JBox.sh
# apply code changes to docker image
sam-api$ sam build
# start server locally on http://127.0.0.1:3000
sam-api$ sam local start-api --warm-containers EAGER
# or run function locally using event.json as parameter
sam-api$ sam local invoke ApiFunction --event events/event.json
Deploy to AWS
sam build --use-container
sam deploy --guided --profile analog_user --region eu-west-1
Build and deploy new package version using twine
python3 -m pip install --upgrade pip
python3 -m pip install --upgrade build
python3 -m pip install --upgrade twine
python3 -m build
twine upload --skip-existing dist/*
Resources
Project details
Release history Release notifications | RSS feed
Download files
Download the file for your platform. If you're not sure which to choose, learn more about installing packages.
Source Distribution
Built Distribution
Close
Hashes for braincube-aws-core-alpha-0.0.28.tar.gz
Algorithm | Hash digest | |
---|---|---|
SHA256 | 58632a80f4a65635989b14f0fac0839b23e787f67765fd0bdb076749a105de4d |
|
MD5 | 1822d182e8ec18f90abe08244b1f3462 |
|
BLAKE2b-256 | eb55686c8a6d59750822ae670cb38f2fb3560e7a7bd22a5a0a0c43f1a64b0a2b |
Close
Hashes for braincube_aws_core_alpha-0.0.28-py3-none-any.whl
Algorithm | Hash digest | |
---|---|---|
SHA256 | b3abab29d9b951536412636181081fd7dba5e3e89a0b14be8a19ef6f0f87ad79 |
|
MD5 | f97cbddca6888e2c2c5a6398372998b2 |
|
BLAKE2b-256 | 516e55c31db3b62d055f268968275d9be99408c50fe5d7a85315c53b87b68cb7 |