token_auth = dmda.DomoTokenAuth(
    domo_instance=os.environ["DOMO_INSTANCE"],
    domo_access_token=os.environ["DOMO_ACCESS_TOKEN"],
)
await token_auth.who_am_i()
await token_auth.who_am_i()
dataset_id = '04c1574e-c8be-4721-9846-c6ffa491144b'
Dataset Routes
Query Datasets
QueryRequestError
QueryRequestError (res:domolibrary.client.ResponseGetData.ResponseGetData, sql, dataset_id, message=None)
base exception
Dataset_CRUDError
Dataset_CRUDError (dataset_id, res:domolibrary.client.ResponseGetData.ResponseGetData, message=None)
base exception
Dataset_GetError
Dataset_GetError (dataset_id, res:domolibrary.client.ResponseGetData.ResponseGetData, message=None)
base exception
DatasetNotFoundError
DatasetNotFoundError (dataset_id, res:domolibrary.client.ResponseGetData.ResponseGetData, message:str=None)
base exception
query_dataset_private
query_dataset_private (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, sql:str, loop_until_end:bool=False, limit=100, skip=0, maximum=100, filter_pdp_policy_id_ls:List[int]=None, timeout:int=10, session:Optional[httpx.AsyncClient]=None, debug_api:bool=False, parent_class=None, debug_loop:bool=False, debug_num_stacks_to_drop=1)
Type | Default | Details | |
---|---|---|---|
auth | DomoAuth | ||
dataset_id | str | ||
sql | str | ||
loop_until_end | bool | False | retrieve all available rows |
limit | int | 100 | maximum rows to return per request. refers to PAGINATION |
skip | int | 0 | |
maximum | int | 100 | equivalent to the LIMIT or TOP clause in SQL, the number of rows to return total |
filter_pdp_policy_id_ls | List | None | |
timeout | int | 10 | |
session | Optional | None | |
debug_api | bool | False | |
parent_class | NoneType | None | |
debug_loop | bool | False | |
debug_num_stacks_to_drop | int | 1 |
query_dataset_public
query_dataset_public (dev_auth:domolibrary.client.DomoAuth.DomoDeveloperAuth, dataset_id:str, sql:str, session:httpx.AsyncClient, debug_api:bool=False, parent_class:str=None, debug_num_stacks_to_drop=1)
query for hitting public apis, requires client_id and secret authentication
sql = f"SELECT * FROM TABLE"

pd.DataFrame(
    (
        await query_dataset_private(
            dataset_id=dataset_id,
            auth=token_auth,
            sql=sql,
            skip=0,
            maximum=10,
            # filter_pdp_policy_id_ls=[1225, 1226], # to apply pdp filter context
            loop_until_end=False,
            debug_api=True,
            timeout=30,
        )
    ).response
)
🐛 debugging get_data
{'body': {'sql': 'SELECT * FROM TABLE limit 10 offset 0'},
'function_name': 'get_traceback',
'headers': {'Connection': 'keep-alive',
'Content-Type': 'application/json',
'accept': 'application/json, text/plain',
'x-domo-developer-token': '83ece44f1451d4b581e1191f98cd411164f0b5b6ad2755b3'},
'method': 'POST',
'params': {'limit': 10, 'offset': 0},
'parent_class': None,
'url': 'https://domo-community.domo.com/api/query/v1/execute/04c1574e-c8be-4721-9846-c6ffa491144b'}
get_data: sending json
get_data_response <Response [200 OK]>
objectID | url | Title | article | views | created_dt | published_dt | |
---|---|---|---|---|---|---|---|
0 | 000004785 | https://domo-support.domo.com/s/article/360047... | Backing Up Workbench 4 Jobs | Important: Support for Workbench 4 ended on ... | 138 | 2022-10-24T22:30:00 | 2022-10-24T22:42:00 |
1 | 000004807 | https://domo-support.domo.com/s/article/360044... | Backing Up Workbench 5 Jobs | Backing up DataSet jobs is an often overlooked... | 47 | 2022-10-24T22:31:00 | 2022-10-24T22:41:00 |
2 | 000004785 | https://domo-support.domo.com/s/article/360047... | Backing Up Workbench 4 Jobs | Important: Support for Workbench 4 ended on ... | 139 | 2022-10-24T22:30:00 | 2022-10-24T22:42:00 |
3 | 000004081 | https://domo-support.domo.com/s/article/360043... | Beast Mode Functions Reference Guide | IntroYou can use this reference guide to learn... | 826 | 2022-10-24T21:20:00 | 2022-10-24T22:40:00 |
4 | 000004508 | https://domo-support.domo.com/s/article/360043... | Fun Sample DataSets | IntroIt's hard learning how to perform advance... | 365 | 2022-10-24T22:13:00 | 2022-10-24T22:39:00 |
5 | 000004073 | https://domo-support.domo.com/s/article/360042... | Sample Beast Mode Calculations: Date Transforms | IntroRefer to the calculations in this article... | 334 | 2022-10-24T21:20:00 | 2022-10-24T22:40:00 |
6 | 000005031 | https://domo-support.domo.com/s/article/360043... | Formatting Data Label and Hover Text in Your C... | Intro Data labels and hovers in charts have th... | 347 | 2022-11-02T21:00:00 | 2022-11-02T21:04:00 |
7 | 000004088 | https://domo-support.domo.com/s/article/360043... | Date Format Specifier Characters for Beast Mode | IntroIn a Beast Mode calculation using a DATE... | 258 | 2022-10-24T21:20:00 | 2022-10-24T22:42:00 |
8 | 000004785 | https://domo-support.domo.com/s/article/360047... | Backing Up Workbench 4 Jobs | Important: Support for Workbench 4 ended on ... | 138 | 2022-10-24T22:30:00 | 2022-10-24T22:42:00 |
9 | 000004807 | https://domo-support.domo.com/s/article/360044... | Backing Up Workbench 5 Jobs | Backing up DataSet jobs is an often overlooked... | 47 | 2022-10-24T22:31:00 | 2022-10-24T22:41:00 |
Dataset Properties
get_dataset_by_id
get_dataset_by_id (dataset_id:str, auth:Optional[domolibrary.client.DomoAuth.DomoAuth]=None, debug_api:bool=False, session:Optional[httpx.AsyncClient]=None, parent_class:str=None, debug_num_stacks_to_drop=1)
retrieve dataset metadata
Type | Default | Details | |
---|---|---|---|
dataset_id | str | dataset id from URL | |
auth | Optional | None | requires full authentication |
debug_api | bool | False | for troubleshooting API request |
session | Optional | None | |
parent_class | str | None | |
debug_num_stacks_to_drop | int | 1 | |
Returns | ResponseGetData | returns metadata about a dataset |
try:
await get_dataset_by_id(dataset_id=123, auth=token_auth)
except DatasetNotFoundError as e:
print(e)
🛑 DatasetNotFoundError 🛑 - function: get_traceback || 123 || status 404 || dataset - 123 not found at domo-community
(await get_dataset_by_id(dataset_id=dataset_id, auth=token_auth)).response
{'id': '04c1574e-c8be-4721-9846-c6ffa491144b',
'displayType': 'domo-jupyterdata',
'dataProviderType': 'domo-jupyterdata',
'type': 'Jupyter',
'name': 'domo_kbs',
'owner': {'id': '1893952720',
'name': 'Jae Wilson1',
'type': 'USER',
'group': False},
'status': 'SUCCESS',
'created': 1668379680000,
'lastTouched': 1738349729000,
'lastUpdated': 1738349729260,
'rowCount': 1185,
'columnCount': 7,
'cardInfo': {'cardCount': 2, 'cardViewCount': 0},
'properties': {'formulas': {'formulas': {'calculation_ca9d4b1c-f73a-4f76-9f94-d3c4ca6871c5': {'templateId': 2664,
'id': 'calculation_ca9d4b1c-f73a-4f76-9f94-d3c4ca6871c5',
'name': 'rowcount',
'formula': 'sum(1)',
'status': 'VALID',
'dataType': 'LONG',
'persistedOnDataSource': True,
'isAggregatable': True,
'bignumber': False,
'variable': False},
'calculation_38846559-d190-4ab1-809b-bcd361db5670': {'templateId': 2665,
'id': 'calculation_38846559-d190-4ab1-809b-bcd361db5670',
'name': 'max_views',
'formula': 'max(views)',
'status': 'VALID',
'dataType': 'LONG',
'persistedOnDataSource': True,
'isAggregatable': True,
'bignumber': False,
'columnPositions': [{'columnName': 'views', 'columnPosition': 4}],
'variable': False}}}},
'state': 'SUCCESS',
'validConfiguration': True,
'validAccount': True,
'streamId': 825,
'transportType': 'API',
'adc': False,
'adcExternal': False,
'masked': False,
'currentUserFullAccess': True,
'cloudId': 'domo',
'cloudName': 'Domo',
'permissions': 'READ_WRITE_DELETE_SHARE_ADMIN',
'hidden': False,
'tags': '["developer_documentation","hackercore"]',
'scheduleActive': True,
'cardCount': 2,
'cryoStatus': 'ADRENALINE',
'cloudEngine': 'domo'}
get_schema
get_schema (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None, session:httpx.AsyncClient=None)
retrieve the schema for a dataset
sample implementation of get_schema
res = await get_schema(dataset_id=dataset_id, auth=token_auth)

# retrieve schema from response
pd.DataFrame(res.response.get("tables")[0].get("columns"))
name | id | type | visible | order | |
---|---|---|---|---|---|
0 | objectID | objectID | STRING | True | 0 |
1 | url | url | STRING | True | 0 |
2 | Title | Title | STRING | True | 0 |
3 | article | article | STRING | True | 0 |
4 | views | views | LONG | True | 0 |
5 | created_dt | created_dt | DATETIME | True | 0 |
6 | published_dt | published_dt | DATETIME | True | 0 |
alter_schema
alter_schema (auth:domolibrary.client.DomoAuth.DomoAuth, schema_obj:dict, dataset_id:str, debug_api:bool=False, parent_class:str=None, debug_num_stacks_to_drop:int=1, session=<httpx.AsyncClient object at 0x7f20209383d0>)
alter the schema for a dataset
schema_res = await get_schema(dataset_id=dataset_id, auth=token_auth)

schema_obj = schema_res.response["tables"][0]

await alter_schema(dataset_id=dataset_id, auth=token_auth, schema_obj=schema_obj)

## must index dataset after alter schema
# await index_dataset(dataset_id=os.environ["DOJO_DATASET_ID"], auth=token_auth)
ResponseGetData(status=200, response={'schemaId': 3}, is_success=True, parent_class=None)
Upload Data
overview
In the URL, parts refers to the multi-part API and is unrelated to the partitions concept. The multi-part API was designed to allow sending multiple streams of Data into a data_version simultaneously.
In stage 1, the values passed in the Body will be superseded by values in the COMMIT (stage 3), so best practices is to not populate values here.
The response includes an uploadId, which must be stored and passed to the URL of the subsequent upload request (stages 2 and 3).
url params
The dataTag parameter allows users to UPDATE or REPLACE a datatag (partition)
NOTE: restateDataTag is largely deprecated // exists for backward compatibility
body params
The appendId parameter accepts “latest” or “None”
latest will APPEND the data version to the dataset
UploadDataError
UploadDataError (stage_num:int, dataset_id:str, res:domolibrary.client.ResponseGetData.ResponseGetData)
raise if unable to upload data to Domo
upload_dataset_stage_1
upload_dataset_stage_1 (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, partition_tag:str=None, session:Optional[httpx.AsyncClient]=None, debug_api:bool=False, return_raw:bool=False, debug_num_stacks_to_drop=1, parent_class=None)
preps dataset for upload by creating an upload_id (upload session key) pass to stage 2 as a parameter
Type | Default | Details | |
---|---|---|---|
auth | DomoAuth | ||
dataset_id | str | ||
partition_tag | str | None | synonymous with data_tag |
session | Optional | None | |
debug_api | bool | False | |
return_raw | bool | False | |
debug_num_stacks_to_drop | int | 1 | |
parent_class | NoneType | None | |
Returns | ResponseGetData |
partition_key = "2023-04-27"

await upload_dataset_stage_1(
    auth=token_auth, dataset_id=dataset_id, partition_tag=partition_key, debug_api=False
)
ResponseGetData(status=200, response=21, is_success=True, parent_class=None)
upload_dataset_stage_2_file
upload_dataset_stage_2_file (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, upload_id:str, data_file:Optional[_io.TextIOWrapper]=None, session:Optional[httpx.AsyncClient]=None, part_id:str=2, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None)
Type | Default | Details | |
---|---|---|---|
auth | DomoAuth | ||
dataset_id | str | ||
upload_id | str | must originate from a stage_1 upload response | |
data_file | Optional | None | |
session | Optional | None | |
part_id | str | 2 | only necessary if streaming multiple files into the same partition (multi-part upload) |
debug_api | bool | False | |
debug_num_stacks_to_drop | int | 1 | |
parent_class | NoneType | None | |
Returns | ResponseGetData |
upload_dataset_stage_2_df
upload_dataset_stage_2_df (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, upload_id:str, upload_df:pandas.core.frame.DataFrame, session:Optional[httpx.AsyncClient]=None, part_id:str=2, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None)
Type | Default | Details | |
---|---|---|---|
auth | DomoAuth | ||
dataset_id | str | ||
upload_id | str | must originate from a stage_1 upload response | |
upload_df | DataFrame | ||
session | Optional | None | |
part_id | str | 2 | only necessary if streaming multiple files into the same partition (multi-part upload) |
debug_api | bool | False | |
debug_num_stacks_to_drop | int | 1 | |
parent_class | NoneType | None | |
Returns | ResponseGetData |
upload_dataset_stage_3
upload_dataset_stage_3 (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, upload_id:str, update_method:str='REPLACE', partition_tag:str=None, is_index:bool=False, session:httpx.AsyncClient=None, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None)
commit will close the upload session, upload_id. this request defines how the data will be loaded into Adrenaline, update_method has optional flag for indexing dataset.
Type | Default | Details | |
---|---|---|---|
auth | DomoAuth | ||
dataset_id | str | ||
upload_id | str | must originate from a stage_1 upload response | |
update_method | str | REPLACE | accepts REPLACE or APPEND |
partition_tag | str | None | synonymous with data_tag |
is_index | bool | False | index after uploading |
session | AsyncClient | None | restate_data_tag: str = None, # deprecated |
debug_api | bool | False | |
debug_num_stacks_to_drop | int | 1 | |
parent_class | NoneType | None | |
Returns | ResponseGetData |
community_auth = dmda.DomoTokenAuth(
    domo_instance="domo-community",
    domo_access_token=os.environ["DOMO_DOJO_ACCESS_TOKEN"],
)

ds_id = "cbae0e0c-a92d-4a4c-8d0c-c9ccd38fe928"

df = pd.DataFrame([{"col_a": "a", "col_b": "b", "col_c": "c"}])

session = httpx.AsyncClient()

s1_res = await upload_dataset_stage_1(
    auth=community_auth,
    dataset_id=ds_id,
    partition_tag=None,
    debug_api=False,
    session=session,
)

upload_id = s1_res.response

s2_res = await upload_dataset_stage_2_df(
    auth=community_auth,
    dataset_id=ds_id,
    upload_id=upload_id,
    upload_df=df,
    part_id=2,
    debug_api=False,
    session=session,
)

s3_res = await upload_dataset_stage_3(
    auth=community_auth,
    dataset_id=ds_id,
    upload_id=upload_id,
    update_method="REPLACE",  # accepts REPLACE or APPEND
    is_index=True,  # index after uploading
    session=session,
)

await session.aclose()

s3_res.is_success
True
index_dataset
index_dataset (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, session:httpx.AsyncClient=None, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None)
manually index a dataset
index_status
index_status (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, index_id:str, session:httpx.AsyncClient=None, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None)
get the completion status of an index
Working with Partitions
list_partitions
list_partitions (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, body:dict=None, session:httpx.AsyncClient=None, debug_api:bool=False, debug_loop:bool=False, debug_num_stacks_to_drop=2, parent_class=None)
generate_list_partitions_body
generate_list_partitions_body (limit=100, offset=0)
pd.DataFrame(
    (await list_partitions(auth=token_auth, dataset_id=dataset_id)).response[0:5]
)
create
create (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_name:str, dataset_type:str='api', schema:dict=None, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None, session:httpx.AsyncClient=None)
generate_create_dataset_body
generate_create_dataset_body (dataset_name:str, dataset_type:str='API', schema:dict=None)
sample implementation of create dataset
# # | eval : false
# token_auth = dmda.DomoTokenAuth(
# domo_instance=os.environ['DOMO_INSTANCE'],
# domo_access_token=os.environ["DOMO_ACCESS_TOKEN"],
# )
# await create(dataset_name = 'hello world', dataset_type = 'api', auth = token_auth)
Create RDS output
@gd.route_function
async def create_rds_dataset(
    auth: dmda.DomoAuth,
    dataset_name: str,
    dataset_description: str = "RDS output created by domolibrary",
    debug_api: bool = False,
    debug_num_stacks_to_drop=1,
    parent_class=None,
    session: httpx.AsyncClient = None,
):
    """Create an RDS (ObservabilityMetrics) output dataset via the executor API.

    Args:
        auth: full authentication object; supplies domo_instance for the URL.
        dataset_name: display name for the new dataset.
        dataset_description: description stored on the new dataset.
        debug_api: pass-through flag for troubleshooting the API request.
        debug_num_stacks_to_drop: traceback frames to drop in get_data logging.
        parent_class: pass-through label for get_data logging.
        session: optional shared httpx.AsyncClient.

    Returns:
        ResponseGetData returned by gd.get_data.

    Raises:
        Dataset_CRUDError: if the API request does not succeed.
    """
    body = {
        "dataSourceName": dataset_name,
        "dataSourceDescription": dataset_description,
        "dataSourceType": "ObservabilityMetrics",
        "schema": {
            "columns": [
                {
                    "type": "STRING",
                    "name": "Remote Domo Stats",
                }
            ]
        },
    }

    url = f"https://{auth.domo_instance}.domo.com/api/executor/v1/datasets"

    res = await gd.get_data(
        auth=auth,
        method="POST",
        url=url,
        body=body,
        session=session,
        debug_api=debug_api,
        num_stacks_to_drop=debug_num_stacks_to_drop,
        parent_class=parent_class,
    )

    if not res.is_success:
        # BUG FIX: the original raised with `dataset_id=dataset_id`, but no
        # `dataset_id` exists in this scope (the dataset is being created),
        # so the raise would fail with NameError instead of Dataset_CRUDError.
        raise Dataset_CRUDError(
            dataset_id=None,
            domo_instance=auth.domo_instance,
            status=res.status,
            parent_class=parent_class,
            function_name=res.traceback_details.function_name,
            message=res.response,
        )

    return res
# # | eval : false
token_auth = dmda.DomoTokenAuth(
    domo_instance=os.environ['DOMO_INSTANCE'],
    domo_access_token=os.environ["DOMO_ACCESS_TOKEN"],
)
#res = await create_rds_dataset(auth=token_auth, dataset_name="RDS_test")
#print (res.response)
delete_partition_stage_1
delete_partition_stage_1 (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, dataset_partition_id:str, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None, session:httpx.AsyncClient=None)
Delete partition has 3 stages # Stage 1. This marks the data version associated with the partition tag as deleted. It does not delete the partition tag or remove the association between the partition tag and data version. There should be no need to upload an empty file – step #3 will remove the data from Adrenaline. # update on 9/9/2022 based on the conversation with Greg Swensen
delete_partition_stage_2
delete_partition_stage_2 (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, dataset_partition_id:str, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None, session:httpx.AsyncClient=None)
This will remove the partition association so that it doesn’t show up in the list call. Technically, this is not required as a partition against a deleted data version will not count against the 400 partition limit but as the current partitions api doesn’t make that clear, cleaning these up will make it much easier for you to manage.
delete
delete (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None, session:httpx.AsyncClient=None)
get_permissions
get_permissions (auth:domolibrary.client.DomoAuth.DomoAuth, dataset_id:str, debug_api:bool=False, debug_num_stacks_to_drop=1, parent_class=None, session:httpx.AsyncClient=None)
retrieve the permissions (access list) for a dataset
(await get_permissions(auth=token_auth, dataset_id=dataset_id)).response
{'list': [{'type': 'GROUP',
'id': '1063525969',
'accessLevel': 'CAN_EDIT',
'name': 'Power User Group'},
{'type': 'USER',
'id': '1898323170',
'accessLevel': 'CAN_SHARE',
'name': 'Creed Smith'},
{'type': 'USER',
'id': '838842812',
'accessLevel': 'CAN_EDIT',
'name': 'test_pdp3@sony.com - via dl 2023-05-05'},
{'type': 'GROUP',
'id': '49793884',
'accessLevel': 'CAN_SHARE',
'name': 'ADM | Orientation'},
{'type': 'USER',
'id': '1893952720',
'accessLevel': 'OWNER',
'name': 'Jae Wilson1'},
{'type': 'USER',
'id': '308783524',
'accessLevel': 'CAN_EDIT',
'name': 'test1'},
{'type': 'USER',
'id': '1774387618',
'accessLevel': 'CAN_SHARE',
'name': 'Cadell Falconer'},
{'type': 'USER',
'id': '1681443709',
'accessLevel': 'CAN_EDIT',
'name': 'test 3 - updated 2024-08-23'},
{'type': 'USER',
'id': '1865099622',
'accessLevel': 'CAN_EDIT',
'name': 'Ben Schein'},
{'type': 'USER',
'id': '1079474652',
'accessLevel': 'CAN_EDIT',
'name': 'Jace McLean'},
{'type': 'USER',
'id': '27',
'accessLevel': 'CAN_SHARE',
'name': 'DomoSupport'},
{'type': 'USER',
'id': '918118628',
'accessLevel': 'CAN_VIEW',
'name': 'Jason Altenburg'},
{'type': 'USER',
'id': '68216396',
'accessLevel': 'CAN_EDIT',
'name': 'Elliott Leonard'},
{'type': 'USER',
'id': '987822803',
'accessLevel': 'CAN_EDIT',
'name': 'test_pdp2@sony.com - via dl 2023-05-05'},
{'type': 'USER',
'id': '1216550715',
'accessLevel': 'CAN_EDIT',
'name': '8:26 - go to sleep'},
{'type': 'USER',
'id': '1186995515',
'accessLevel': 'CAN_EDIT',
'name': 'test_pdp1@sony.com - via dl 2023-05-05'},
{'type': 'USER',
'id': '1345408759',
'accessLevel': 'CAN_SHARE',
'name': 'Alexis Lorenz (DataMaven)'}],
'totalUserCount': 15,
'totalGroupCount': 2}