
A simple interface for managing database connections and queries

Project description

humble-database

humble-database is a light wrapper around SQLAlchemy: it manages connections and sessions, runs queries straight into pandas DataFrames or plain records, and pairs ORM results with Pydantic models for validation and serialization.

Install

pip install humble_database

How to use

Database Example

import os
import pandas as pd
from sqlalchemy import text
from urllib.request import urlretrieve

# import path assumed; adjust to wherever the package exposes Database
from humble_database.database import Database

# download a small example SQLite database (2015 ACS 1-year estimates)
urlretrieve(
    "http://2016.padjo.org/files/data/starterpack/census-acs-1year/acs-1-year-2015.sqlite",
    filename='acs.db'
)

db = Database(drivername='sqlite', database='acs.db')
db.query_to_df("select * from sqlite_schema").head(2)
    type                      name                  tbl_name  rootpage                                             sql
0  table                    states                    states         2  CREATE TABLE states (\n year INTEGER , \n ...
1  table  congressional_districts  congressional_districts         3  CREATE TABLE congressional_districts (\n ye...
db.query_to_df("""select * from states limit 5""")
year name geo_id total_population white black hispanic asian american_indian pacific_islander other_race median_age total_households owner_occupied_homes_median_value per_capita_income median_household_income below_poverty_line foreign_born_population state
0 2015 Alabama 04000US01 4858979 3204076 1296681 192870 58918 19069 2566 5590 38.7 1846390 134100 44765 44765 876016 169972 01
1 2015 Alaska 04000US02 738432 452472 24739 51825 45753 98300 6341 2201 33.3 250185 259600 73355 73355 74532 58544 02
2 2015 Arizona 04000US04 6828065 3802263 282718 2098411 210922 276132 9963 6951 37.4 2463008 194300 51492 51492 1159043 914400 04
3 2015 Arkansas 04000US05 2978204 2174934 466486 207743 41932 18221 7551 3826 37.9 1144663 120700 41995 41995 550508 142841 05
4 2015 California 04000US06 39144818 14815122 2192844 15184545 5476958 135866 143408 87813 36.2 12896357 449100 64500 64500 5891678 10688336 06
db.query_to_records(
    "select * from states limit 2",
)[0]
{'year': 2015, 'name': 'Alabama', 'geo_id': '04000US01', 'total_population': 4858979, 'white': 3204076, 'black': 1296681, 'hispanic': 192870, 'asian': 58918, 'american_indian': 19069, 'pacific_islander': 2566, 'other_race': 5590, 'median_age': 38.7, 'total_households': 1846390, 'owner_occupied_homes_median_value': 134100, 'per_capita_income': 44765, 'median_household_income': 44765, 'below_poverty_line': 876016, 'foreign_born_population': 169972, 'state': '01'}
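query_to_df returns a pandas DataFrame, so the result can flow straight into ordinary pandas work. A small follow-up sketch against the same states table (the query itself is illustrative):

top_states = db.query_to_df(
    "select name, total_population from states order by total_population desc limit 3"
)
print(top_states)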

ORM Example

SQLAlchemy Models

from typing import List, Optional  # Optional is used by the Pydantic models below

from pydantic import BaseModel, computed_field, field_validator, ConfigDict, Field
from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
class Base(DeclarativeBase):
    year: Mapped[int]
    name: Mapped[str]
    geo_id: Mapped[str]
    total_population: Mapped[int]
    white: Mapped[int]
    black: Mapped[int]
    hispanic: Mapped[int]
    asian: Mapped[int]
    american_indian: Mapped[int]
    pacific_islander: Mapped[int]
    other_race: Mapped[int]
    median_age: Mapped[float]
    total_households: Mapped[int]
    owner_occupied_homes_median_value: Mapped[int]
    per_capita_income: Mapped[int]
    median_household_income: Mapped[int]
    below_poverty_line: Mapped[int]
    foreign_born_population: Mapped[int]

class State(Base):
    __tablename__ = 'states'
    state: Mapped[str] = mapped_column(primary_key=True)
    total_population: Mapped[int]

    places: Mapped[List['Place']] = relationship(back_populates='state_')
    congressional_districts: Mapped[List['CongressionalDistrict']] = relationship(back_populates='state_')

class Place(Base):
    __tablename__ = 'places'
    place: Mapped[str] = mapped_column(primary_key=True)
    total_population: Mapped[int]
    state: Mapped[str] = mapped_column(ForeignKey("states.state"))
    
    state_: Mapped['State'] = relationship(back_populates='places')

class CongressionalDistrict(Base):
    __tablename__ = 'congressional_districts'
    
    congressional_district: Mapped[str] = mapped_column(primary_key=True)
    state: Mapped[str] = mapped_column(ForeignKey("states.state"))
    
    state_: Mapped['State'] = relationship(back_populates='congressional_districts')
with db.session_scope() as session:
    s = session.query(State).first()
    print(s,'\n')
    for place in s.places:
        print(place.name,'::',place.median_household_income)
<__main__.State object> 

Birmingham city, Alabama :: 32378
Dothan city, Alabama :: 44208
Hoover city, Alabama :: 77365
Huntsville city, Alabama :: 46769
Mobile city, Alabama :: 38678
Montgomery city, Alabama :: 41836
Tuscaloosa city, Alabama :: 44125
with db.session_scope() as session:
    result = session.query(State).limit(7).all()
    for state in result:
        print(
            state.name,
            len(state.places),
            len(state.congressional_districts)
        )
Alabama 7 7
Alaska 1 1
Arizona 16 9
Arkansas 6 4
California 137 53
Colorado 16 7
Connecticut 8 5
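db.session_scope() yields what the examples above treat as a regular SQLAlchemy session, so the usual query composition works against these models. A hedged sketch (the population threshold is arbitrary):

with db.session_scope() as session:
    # places with more than 500,000 residents, joined to their parent state
    big_places = (
        session.query(Place)
        .join(State, Place.state == State.state)
        .filter(Place.total_population > 500000)
        .order_by(Place.total_population.desc())
        .all()
    )
    for place in big_places:
        print(place.name, '::', place.state_.name)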

Pydantic Models

class ACSBase(BaseModel):
    model_config = ConfigDict(from_attributes=True)
    
    year: int = Field()
    name: str = Field()
    geo_id: str = Field()
    total_population: Optional[int] = Field(None)
    white: Optional[int] = Field(None)
    black: Optional[int] = Field(None)
    hispanic: Optional[int] = Field(None)
    asian: Optional[int] = Field(None)
    american_indian: Optional[int] = Field(None)
    pacific_islander: Optional[int] = Field(None)
    other_race: Optional[int] = Field(None)
    median_age: float = Field()
    total_households: Optional[int] = Field(None)
    owner_occupied_homes_median_value: int = Field()
    per_capita_income: int = Field()
    median_household_income: int = Field()
    below_poverty_line: Optional[int] = Field(None)
    foreign_born_population: Optional[int] = Field(None)    
    state: int = Field()

class PlaceModel(ACSBase):
    """A Model for a record from the 'places' table"""
    place: str

class CDModel(ACSBase):
    """A Model for a record from the 'congressional_districts' table"""
    congressional_district: str 

class StateModel(ACSBase):
    """A Model for a record from the 'states' table"""

    places: List[PlaceModel]
    congressional_districts: List[CDModel]
    
    @computed_field(return_type=float, title='People per District')
    def avg_people_per_cd(self) -> float:
        return sum([cd.total_population for cd in self.congressional_districts]) / len(self.congressional_districts)
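Because ACSBase sets model_config = ConfigDict(from_attributes=True), these models can be built directly from the ORM objects defined earlier. A brief sketch, run inside the session so the relationships can still lazy-load:

with db.session_scope() as session:
    alabama = session.query(State).filter(State.name == 'Alabama').one()
    # pulls the scalar columns plus the places / congressional_districts relationships
    alabama_model = StateModel.model_validate(alabama)

print(round(alabama_model.avg_people_per_cd))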
from IPython.display import JSON
# mode='serialization' includes computed fields in the schema
JSON(StateModel.model_json_schema(mode='serialization'))
<IPython.core.display.JSON object>
from humble_database.data_model import DataModel
ACSDataModel = DataModel[StateModel]
JSON(ACSDataModel.model_json_schema(mode='serialization'))
<IPython.core.display.JSON object>
with db.session_scope() as session:
    orm_result = session.query(State).all()
    result = ACSDataModel(data=orm_result)

result
title: DataModel[StateModel]
description: None
DataFrame:
year name geo_id total_population white black hispanic asian american_indian pacific_islander ... total_households owner_occupied_homes_median_value per_capita_income median_household_income below_poverty_line foreign_born_population state places congressional_districts avg_people_per_cd
0 2015 Alabama 04000US01 4858979 3204076 1296681 192870 58918 19069 2566 ... 1846390 134100 44765 44765 876016.0 169972.0 1 [{'year': 2015, 'name': 'Birmingham city, Alab... [{'year': 2015, 'name': 'Congressional Distric... 694139.857143
1 2015 Alaska 04000US02 738432 452472 24739 51825 45753 98300 6341 ... 250185 259600 73355 73355 74532.0 58544.0 2 [{'year': 2015, 'name': 'Anchorage municipalit... [{'year': 2015, 'name': 'Congressional Distric... 738432.000000
2 2015 Arizona 04000US04 6828065 3802263 282718 2098411 210922 276132 9963 ... 2463008 194300 51492 51492 1159043.0 914400.0 4 [{'year': 2015, 'name': 'Avondale city, Arizon... [{'year': 2015, 'name': 'Congressional Distric... 711564.777778
3 2015 Arkansas 04000US05 2978204 2174934 466486 207743 41932 18221 7551 ... 1144663 120700 41995 41995 550508.0 142841.0 5 [{'year': 2015, 'name': 'Fayetteville city, Ar... [{'year': 2015, 'name': 'Congressional Distric... 695398.750000
4 2015 California 04000US06 39144818 14815122 2192844 15184545 5476958 135866 143408 ... 12896357 449100 64500 64500 5891678.0 10688336.0 6 [{'year': 2015, 'name': 'Alameda city, Califor... [{'year': 2015, 'name': 'Congressional Distric... 735426.811321

5 rows × 22 columns
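DataModel is itself a Pydantic model (its model_json_schema is rendered above), so the combined result serializes like any other model, computed fields included. A hedged sketch, assuming no extra serialization options are needed:

payload = result.model_dump_json(indent=2)  # every StateModel record, including avg_people_per_cd
print(payload[:500])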

Download files

Download the file for your platform. If you're not sure which to choose, learn more about installing packages.

Source Distribution

humble-database-0.0.1.tar.gz (16.9 kB view details)

Uploaded Source

Built Distribution

If you're not sure about the file name format, learn more about wheel file names.

humble_database-0.0.1-py3-none-any.whl (13.9 kB view details)

Uploaded Python 3

File details

Details for the file humble-database-0.0.1.tar.gz.

File metadata

  • Download URL: humble-database-0.0.1.tar.gz
  • Upload date:
  • Size: 16.9 kB
  • Tags: Source
  • Uploaded using Trusted Publishing? No
  • Uploaded via: twine/4.0.2 CPython/3.11.5

File hashes

Hashes for humble-database-0.0.1.tar.gz
Algorithm Hash digest
SHA256 40ffe67d5ea569a8531e9cad344728bf527ad6dbc2f4ffcf1376e0c04091de79
MD5 da04a230f1bd5114782d125a3b96ddd9
BLAKE2b-256 9b76aef01b4edb9df6aa8fc68ef9011034a38de5cf4ee7264c0cf32d17e3e28e

See more details on using hashes here.

File details

Details for the file humble_database-0.0.1-py3-none-any.whl.

File metadata

File hashes

Hashes for humble_database-0.0.1-py3-none-any.whl
Algorithm Hash digest
SHA256 cd71fbf220a5c318c859685f3cea6cf50b8c24b637a822d769973b72a5b40997
MD5 a80360e475ea8fc4d3b8cc6df217b127
BLAKE2b-256 fb3bbf38f71489dbae1cb685b65810fdc508824f7b19614358fdbbcb13b2c5ba

See more details on using hashes here.
