Run computations and export data
import decentriq_platform as dq

user_email = "@@ YOUR EMAIL HERE @@"
api_token = "@@ YOUR TOKEN HERE @@"

client = dq.create_client(user_email, api_token)
# Retrieve the latest enclave specifications supported by this SDK version.
enclave_specs = dq.enclave_specifications.latest()
Setup Script
If you want to test this functionality and don't have a clean room set up already, you can use the following script to create an environment suitable for the rest of this guide.
import decentriq_platform as dq
from decentriq_platform.analytics import *
USER_EMAIL = "@@ YOUR EMAIL HERE @@"
API_TOKEN = "@@ YOUR TOKEN HERE @@"
client = dq.create_client(USER_EMAIL, API_TOKEN)
builder = AnalyticsDcrBuilder(client=client)
builder.\
    with_name("My DCR").\
    with_owner(USER_EMAIL).\
    with_description("My test DCR")

# A raw data node accepts arbitrary unstructured data, such as text files.
builder.add_node_definition(
    RawDataNodeDefinition(name="input_data_node", is_required=True)
)

# A table data node accepts tabular data that must match the given schema.
columns = [
    Column(
        name="name",
        format_type=FormatType.STRING,
        is_nullable=False,
    ),
    Column(
        name="salary",
        format_type=FormatType.FLOAT,
        is_nullable=False,
    ),
]

builder.add_node_definition(
    TableDataNodeDefinition(
        name="salary_data", columns=columns, is_required=True
    )
)
# This Python script reads the raw input data and writes an uppercased copy
# to the output directory. Everything written to /output becomes part of
# the computation result.
my_script_content = b"""
with open("/input/input_data_node", "r") as input_file:
    input_data = input_file.read()

with open("/output/uppercase.txt", "w") as output_file:
    output_file.write(input_data.upper())
"""

builder.add_node_definition(
    PythonComputeNodeDefinition(
        name="uppercase_text_node",
        script=my_script_content,
        dependencies=["input_data_node"]
    )
)
# This SQL query computes the sum of the salary column of the table node.
my_query_content = """
SELECT SUM(salary) FROM salary_data
"""

builder.add_node_definition(
    SqliteComputeNodeDefinition(
        name="sum_salary",
        query=my_query_content,
        dependencies=["salary_data"]
    )
)
# Register the participant and grant permissions: the same user acts as
# data owner of both data nodes and as analyst of both compute nodes.
builder.add_participant(
    USER_EMAIL,
    data_owner_of=["input_data_node", "salary_data"],
    analyst_of=["uppercase_text_node", "sum_salary"]
)

dcr_definition = builder.build()
dcr = client.publish_analytics_dcr(dcr_definition)
import io

# Upload tabular data to the table data node. The CSV rows must match the
# schema defined above (name, salary).
data = io.BytesIO("""Alice,10.0
Bob,5.0
John,14.0
""".encode())
data_node = dcr.get_node("salary_data")
data_node.upload_and_publish_dataset(data, key=dq.Key(), name="my_salary_data.csv")

# Upload raw text data to the raw data node.
data = io.BytesIO(b"hello world")
data_node = dcr.get_node("input_data_node")
data_node.upload_and_publish_dataset(data, key=dq.Key(), name="my-data.txt")

# Note the ID of the published DCR: the snippets below use it to retrieve
# the clean room again.
DCR_ID = dcr.id
print(DCR_ID)
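Datasets don't have to be assembled in memory: upload_and_publish_dataset accepts any binary stream. A minimal sketch, assuming a local file named salary.csv exists (a hypothetical path) and its columns match the salary_data schema:

with open("salary.csv", "rb") as f:
    # Opening in binary mode yields the binary stream the upload expects.
    # The file name "salary.csv" is purely illustrative.
    dcr.get_node("salary_data").upload_and_publish_dataset(
        f, key=dq.Key(), name="salary.csv"
    )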
Run and retrieve tabular computation results
The computation types that take a table as input and return a table as output are SQL and Synthetic Data computations.
import decentriq_platform as dq
user_email = "@@ YOUR EMAIL HERE @@"
api_token = "@@ YOUR TOKEN HERE @@"
DCR_ID = "@@ YOUR ANALYTICS DCR ID HERE @@"
client = dq.create_client(user_email, api_token)
dcr = client.retrieve_analytics_dcr(DCR_ID)
sql_node = dcr.get_node("sum_salary")
# Trigger the computation and wait for its result, returned as a CSV string.
results_csv = sql_node.run_computation_and_get_results_as_string()
print("Computation result:\n\n", results_csv)
Run and retrieve script computation results
Python and R computation nodes may take any input and produce any output. Results are downloaded as a ZIP file containing the contents of the entire /output directory.
import decentriq_platform as dq

user_email = "@@ YOUR EMAIL HERE @@"
api_token = "@@ YOUR TOKEN HERE @@"
DCR_ID = "@@ YOUR ANALYTICS DCR ID HERE @@"

client = dq.create_client(user_email, api_token)
dcr = client.retrieve_analytics_dcr(DCR_ID)
python_node = dcr.get_node("uppercase_text_node")
# Trigger the computation and download the entire /output directory as a ZIP archive.
results = python_node.run_computation_and_get_results_as_zip()

result_txt = results.read("uppercase.txt").decode()
print("Computation result:\n\n", result_txt)