
Apeman DAL service client SDK

Project description

This library enables you to interact with the DAL service to save and load your data.

Publish:

python -m build
twine upload --repository-url https://test.pypi.org/legacy/ dist/*
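
These commands require the build and twine packages; install them first if needed:

pip install build twine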

Install:

pip install apeman-dal-client==0.1.0

How to use

Point the client at the DAL server by exporting its address:

export apeman_dal_server_addr='localhost:9090'
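
If you prefer to set the address from Python instead of the shell, a minimal sketch (assuming the variable is read when the client is constructed):

import os

# equivalent to the shell export above; set before creating ApemanDalTsdbClient
os.environ['apeman_dal_server_addr'] = 'localhost:9090'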
Create Datafeed
from apeman.dal.tsdb import DataAmount, DataType
from apeman.dal.tsdb import ApemanDalTsdbClient
from apeman.dal.tsdb import ColumnDefinitionBuilder, CreateDatafeedRequestBuilder, \
     UniqueKeyBuilder

# build create datafeed request
tag = ColumnDefinitionBuilder() \
            .set_name(name='tag_name') \
            .set_non_null(non_null=True) \
            .set_type(data_type=DataType.TEXT) \
            .set_limit(limit=100).build()

field = ColumnDefinitionBuilder() \
            .set_name(name='field_name') \
            .set_non_null(non_null=True) \
            .set_type(data_type=DataType.TEXT) \
            .set_limit(limit=100).build()

unique_key = UniqueKeyBuilder().add(name='tag_name').build()
unique_key_2 = UniqueKeyBuilder().add(name='field_name').build()

data_amount = DataAmount.SMALL

create_datafeed_request = CreateDatafeedRequestBuilder() \
            .set_name(name='test') \
            .add_tag(tag=tag) \
            .add_field(field=field) \
            .add_unique_key(unique_key=unique_key) \
            .add_unique_key(unique_key=unique_key_2) \
            .set_data_amount(data_amount=data_amount) \
            .build()

client = ApemanDalTsdbClient()
# create datafeed
client.create_datafeed(request=create_datafeed_request)

Delete Datafeed
from apeman.dal.tsdb import ApemanDalTsdbClient

client = ApemanDalTsdbClient()
# delete datafeed
client.delete_datafeed(datafeed='test')
Get Datafeed
from apeman.dal.tsdb import ApemanDalTsdbClient

client = ApemanDalTsdbClient()
# get datafeed
client.get_datafeed(datafeed='test')
List Datafeed
from apeman.dal.tsdb import ApemanDalTsdbClient

client = ApemanDalTsdbClient()
# list datafeeds
client.list_datafeed(query_filter='')
Put Data
from apeman.dal.tsdb import ApemanDalTsdbClient
from apeman.dal.tsdb import DataType
from apeman.dal.tsdb.proto_builder import TabularDataBuilder, ColumnMetaBuilder, TupleBuilder, ValBuilder
import time

client = ApemanDalTsdbClient()

column_a = ColumnMetaBuilder()  \
                .set_name(name='column_a') \
                .set_data_type(data_type=DataType.TEXT) \
                .build()
column_b = ColumnMetaBuilder()  \
                .set_name(name='column_b')  \
                .set_data_type(data_type=DataType.BIGINT) \
                .build()

# build a timestamp value (shown for reference; not used in the tuples below)
test_val = ValBuilder().set_ts(epoch_in_millis=int(time.time() * 1000)).build()

# each tuple supplies one value per declared column, in declaration order
# (choose the ValBuilder setter that matches the column's data type)
tuple_a = TupleBuilder() \
                .add_val(val=ValBuilder().set_i32(1000).build()) \
                .add_val(val=ValBuilder().set_i32(1000).build()) \
                .build()

tuple_b = TupleBuilder() \
                .add_val(val=ValBuilder().set_i32(1000).build()) \
                .add_val(val=ValBuilder().set_i32(1000).build()) \
                .build()

data = TabularDataBuilder()  \
                .add_column(column=column_a) \
                .add_column(column=column_b) \
                .add_tuple(tuple=tuple_a) \
                .add_tuple(tuple=tuple_b) \
                .build()

# write the rows to the 'test' datafeed
client.put_data(datafeed='test', data=data)
Get Data
from apeman.dal.tsdb import ApemanDalTsdbClient
from apeman.dal.tsdb.proto_builder import  OutputColumnBuilder, GetDataRequestBuilder


client = ApemanDalTsdbClient()

output_column_builder = OutputColumnBuilder().set_expr(expr='test').set_alias(alias='column_a')
column_a = output_column_builder.build()

output_column_builder = OutputColumnBuilder().set_expr(expr='test').set_alias(alias='column_b')
column_b = output_column_builder.build()

request = GetDataRequestBuilder()  \
                .add_column(column=column_a)  \
                .add_column(column=column_b)  \
                .set_datafeed(datafeed='test')  \
                .set_where(where='')  \
                .set_group_by(group_by='')  \
                .set_having(having='')  \
                .set_order_by(order_by='')  \
                .set_offset(offset=0)  \
                .set_limit(limit=100)  \
                .build()

client.get_data(request=request)
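
The call returns the query result for the selected columns. A minimal way to inspect it (a sketch assuming only that the returned object has a readable string representation):

response = client.get_data(request=request)
print(response)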

Download files

Download the file for your platform. If you're not sure which to choose, learn more about installing packages.

Source Distribution

apeman-dal-client-0.0.15.tar.gz (9.7 kB)

Uploaded Source

Built Distribution

apeman_dal_client-0.0.15-py3-none-any.whl (10.6 kB)

Uploaded Python 3

File details

Details for the file apeman-dal-client-0.0.15.tar.gz.

File metadata

  • Download URL: apeman-dal-client-0.0.15.tar.gz
  • Upload date:
  • Size: 9.7 kB
  • Tags: Source
  • Uploaded using Trusted Publishing? No
  • Uploaded via: twine/4.0.2 CPython/3.11.2

File hashes

Hashes for apeman-dal-client-0.0.15.tar.gz

  • SHA256: f60bae7d1dec24f0ed8bcb2a968fbae4804872f952653875364facb9be07f939
  • MD5: 7289f2a6382d89a7ede13e2749c7a12f
  • BLAKE2b-256: fe37819d2af31126294dac8d7b05a7ee78b5fb75baadb26f14683a425eca5ddd

See more details on using hashes here.
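
As an illustration, a downloaded archive can be checked against the SHA256 digest above using Python's standard hashlib module (the local file path is an assumption):

import hashlib

expected = 'f60bae7d1dec24f0ed8bcb2a968fbae4804872f952653875364facb9be07f939'
with open('apeman-dal-client-0.0.15.tar.gz', 'rb') as f:  # path to the downloaded sdist
    digest = hashlib.sha256(f.read()).hexdigest()

print(digest == expected)  # True if the file matches the published hash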

File details

Details for the file apeman_dal_client-0.0.15-py3-none-any.whl.

File metadata

File hashes

Hashes for apeman_dal_client-0.0.15-py3-none-any.whl

  • SHA256: dca82a63dafecf40f8de2656946819509835dddfe1d1fcea738da92472a8f699
  • MD5: 001f70ad65766f2369a1dcc8bad318ee
  • BLAKE2b-256: 5e2022131784c7c277c5c61961aeeace0ecba9e5c06db06981288a091c926681

See more details on using hashes here.
