Snowflake connection

IP RESTRICTIONS

Make sure to allow traffic from one of the Paradime IPs in your firewall depending on the data location selected.

👉 See also: Paradime IP addresses.

1. Snowflake setup guide

Create a Snowflake role and user

Generate a password for the Paradime user and replace the placeholder text <your_generated_password_here> in the step below.

-- 1.### create user and role

-- USERADMIN is the least-privileged built-in role that can create
-- users and roles.
use role useradmin;
create user if not exists paradime_cost_user
  password = '<your_generated_password_here>'  -- replace with the password you generated
  default_role = paradime_cost_role            -- role is created later in this script
  default_warehouse = paradime_warehouse       -- warehouse is created in step 2
  comment = 'Used by paradime.io for cost analytics';

create role if not exists paradime_cost_role comment = 'Used by paradime.io for cost analytics';
-- Attach the new role under SYSADMIN so it stays in the standard role hierarchy.
grant role paradime_cost_role to role sysadmin;
grant role paradime_cost_role to user paradime_cost_user;

Create a Snowflake warehouse

-- 2.### create warehouse

use role sysadmin;
-- Dedicated XS warehouse so Paradime's cost-analytics queries are
-- isolated (and separately billable) from your other workloads.
create warehouse if not exists paradime_warehouse
  warehouse_size=xsmall
  auto_suspend=60            -- auto-suspend after 60 seconds of inactivity
  initially_suspended=true   -- created suspended; resumes on first use
  comment = 'Used by paradime.io for cost analytics';

-- Paradime needs to see, start/stop, use, and resize this warehouse.
grant monitor, operate, usage, modify
  on warehouse paradime_warehouse to role paradime_cost_role;

Create S3 integration

Before running this command, you will need to update STORAGE_AWS_ROLE_ARN and STORAGE_ALLOWED_LOCATIONS with your own region and company token.

You can find the region and company token in the Paradime workspace settings page.

-- 3.### Create S3 integration

-- Storage integrations are account-level objects; ACCOUNTADMIN (or a
-- role granted CREATE INTEGRATION) is required to create them.
use role accountadmin;

create or replace storage integration PARADIME_S3_STORAGE_INTEGRATION
    type = external_stage
    storage_provider = 'S3'
    enabled = true
    STORAGE_AWS_ROLE_ARN = 'arn:aws:iam::817727247863:role/paradime-snowflake-role-<your_paradime_region>-<your_company_token>'  -- replace company token and region before running this command
    STORAGE_ALLOWED_LOCATIONS = ('s3://paradime-s3-<your_company_token>-<your_paradime_region>/snowflake/');  -- replace company token and region before running this command

-- Allow the Paradime role to create stages that use this integration.
grant usage on integration PARADIME_S3_STORAGE_INTEGRATION to role paradime_cost_role;

Create a new database and schema for Paradime

-- 4. ### create paradime db and schemas and grant access

use role accountadmin;

-- WARNING: "create or replace" drops any existing database/schema of the
-- same name together with its contents. Safe on first run; re-running
-- will wipe previously loaded data.
create or replace database PARADIME_COST_ANALYTICS;

create or replace schema PARADIME_COST_ANALYTICS.CORE;

grant usage on database  PARADIME_COST_ANALYTICS to role paradime_cost_role;

grant usage on schema  PARADIME_COST_ANALYTICS.CORE to role paradime_cost_role;

Create a new Snowflake file format


-- 5.### Create fileformat and grant privileges

use role accountadmin;

-- JSON file format used when loading Paradime data staged in S3.
-- NULL_IF = () disables the default NULL-token substitution so values
-- are loaded verbatim.
create or replace file format PARADIME_COST_ANALYTICS.CORE.PARADIME_JSON_FORMAT
    type = JSON
    NULL_IF = ();

-- Fixed: added the "role" keyword ("to role paradime_cost_role") for
-- valid grant syntax and consistency with every other grant in this guide.
grant all privileges on file format PARADIME_COST_ANALYTICS.CORE.PARADIME_JSON_FORMAT to role paradime_cost_role;

Grant access to Snowflake metadata to the Paradime user

-- 6.### grant access to Snowflake metadata

use role accountadmin;

-- IMPORTED PRIVILEGES on the shared SNOWFLAKE database exposes the
-- ACCOUNT_USAGE views (query history, credit/usage data) to the role.
grant imported privileges on database snowflake to role paradime_cost_role;

Enable the Paradime user to manage and monitor Snowflake Warehouses

-- 7.### grant access to monitor warehouses

use role accountadmin;

-- Account-level MONITOR USAGE lets the role view usage and billing
-- information across the whole account.
grant monitor usage on account to role paradime_cost_role;


--- ### grant paradime the access to manage warehouses
use role accountadmin;

-- MANAGE WAREHOUSES allows operating on any warehouse in the account
-- (suspend/resume/resize). Used by Paradime's warehouse-management
-- features (see the heading above); omit if you only want read-only
-- cost analytics.
grant manage warehouses on account to role paradime_cost_role;

Full script here 👇

Snowflake setup guide script
-- 1.### create user and role

-- USERADMIN is the least-privileged built-in role that can create
-- users and roles.
use role useradmin;
create user if not exists paradime_cost_user
  password = '<your_generated_password_here>'  -- fixed typo: placeholder now matches the one the setup instructions tell you to replace
  default_role = paradime_cost_role            -- role is created later in this script
  default_warehouse = paradime_warehouse       -- warehouse is created in step 2
  comment = 'Used by paradime.io for cost analytics';

create role if not exists paradime_cost_role comment = 'Used by paradime.io for cost analytics';
-- Attach the new role under SYSADMIN so it stays in the standard role hierarchy.
grant role paradime_cost_role to role sysadmin;
grant role paradime_cost_role to user paradime_cost_user;

-- 2.### create warehouse

use role sysadmin;
-- Dedicated XS warehouse so Paradime's cost-analytics queries are
-- isolated (and separately billable) from your other workloads.
create warehouse if not exists paradime_warehouse
  warehouse_size=xsmall
  auto_suspend=60            -- auto-suspend after 60 seconds of inactivity
  initially_suspended=true   -- created suspended; resumes on first use
  comment = 'Used by paradime.io for cost analytics';

-- Paradime needs to see, start/stop, use, and resize this warehouse.
grant monitor, operate, usage, modify
  on warehouse paradime_warehouse to role paradime_cost_role;
  
-- 3.### Create S3 integration

-- Storage integrations are account-level objects; ACCOUNTADMIN (or a
-- role granted CREATE INTEGRATION) is required to create them.
use role accountadmin;

create or replace storage integration PARADIME_S3_STORAGE_INTEGRATION
    type = external_stage
    storage_provider = 'S3'
    enabled = true
    STORAGE_AWS_ROLE_ARN = 'arn:aws:iam::817727247863:role/paradime-snowflake-role-<your_paradime_region>-<your_company_token>'  -- replace company token and region before running this command
    STORAGE_ALLOWED_LOCATIONS = ('s3://paradime-s3-<your_company_token>-<your_paradime_region>/snowflake/');  -- replace company token and region before running this command

-- Allow the Paradime role to create stages that use this integration.
grant usage on integration PARADIME_S3_STORAGE_INTEGRATION to role paradime_cost_role;

-- 4. ### create paradime db and schemas and grant access

use role accountadmin;

-- WARNING: "create or replace" drops any existing database/schema of the
-- same name together with its contents. Safe on first run; re-running
-- will wipe previously loaded data.
create or replace database PARADIME_COST_ANALYTICS;

create or replace schema PARADIME_COST_ANALYTICS.CORE;

grant usage on database  PARADIME_COST_ANALYTICS to role paradime_cost_role;

grant usage on schema  PARADIME_COST_ANALYTICS.CORE to role paradime_cost_role;


-- 5.### Create fileformat and grant privileges

use role accountadmin;

-- JSON file format used when loading Paradime data staged in S3.
-- NULL_IF = () disables the default NULL-token substitution so values
-- are loaded verbatim.
create or replace file format PARADIME_COST_ANALYTICS.CORE.PARADIME_JSON_FORMAT
    type = JSON
    NULL_IF = ();

-- Fixed: added the "role" keyword ("to role paradime_cost_role") for
-- valid grant syntax and consistency with every other grant in this guide.
grant all privileges on file format PARADIME_COST_ANALYTICS.CORE.PARADIME_JSON_FORMAT to role paradime_cost_role;

-- 6.### grant access to Snowflake metadata

use role accountadmin;

-- IMPORTED PRIVILEGES on the shared SNOWFLAKE database exposes the
-- ACCOUNT_USAGE views (query history, credit/usage data) to the role.
grant imported privileges on database snowflake to role paradime_cost_role;

-- 7.### grant access to monitor warehouses

use role accountadmin;

-- Account-level MONITOR USAGE lets the role view usage and billing
-- information across the whole account.
grant monitor usage on account to role paradime_cost_role;


--- ### grant paradime the access to manage warehouses
use role accountadmin;

-- MANAGE WAREHOUSES allows operating on any warehouse in the account
-- (suspend/resume/resize). Used by Paradime's warehouse-management
-- features; omit if you only want read-only cost analytics.
grant manage warehouses on account to role paradime_cost_role;

2. dbt™️ project setup guide

Create a macro to manage query comments

To enable Paradime to enrich your Snowflake queries with additional metadata you will need to create a new dbt™️ macro called get_query_comment.sql in the macros folder of your project.

get_query_comment.sql dbt™️ macro
get_query_comment.sql
{% macro get_query_comment(node, extra = {}) %}
    {#
        Build a JSON query comment containing dbt invocation and node
        metadata so Paradime can attribute Snowflake query costs back to
        dbt resources.

        Args:
            node:  the dbt node being executed (model, seed, test, ...);
                   may be none for connection-level queries.
            extra: optional dict of additional key/value pairs merged into
                   the comment.

        Returns: a JSON string (via tojson) of the collected metadata.
    #}
    {%- set comment_dict = extra -%}
    {%- do comment_dict.update(
        app='dbt',
        dbt_version=dbt_version,
        project_name=project_name,
        target_name=target.name,
        target_database=target.database,
        target_schema=target.schema,
        invocation_id=invocation_id
    ) -%}

    {%- if node is not none -%}
        {%- do comment_dict.update(
            node_name=node.name,
            node_alias=node.alias,
            node_package_name=node.package_name,
            node_original_file_path=node.original_file_path,
            node_database=node.database,
            node_schema=node.schema,
            node_id=node.unique_id,
            node_resource_type=node.resource_type,
            node_meta=node.config.meta,
            node_tags=node.tags,
            full_refresh=flags.FULL_REFRESH,
            which=flags.WHICH,
        ) -%}

        {%- if flags.INVOCATION_COMMAND -%}
            {%- do comment_dict.update(
                invocation_command=flags.INVOCATION_COMMAND
            ) -%}
        {%- endif -%}

        {%- if node.resource_type != ('seed') -%} {# Otherwise this throws an error saying 'Seeds cannot depend on other nodes.' #}
            {%- if node.refs is defined -%}
                {# dbt >= 1.5 exposes each ref as an object with a .name
                   attribute; earlier versions expose it as a list.
                   Fixed: compare version parts numerically -- the original
                   string comparison (dbt_version >= '1.5.0') mis-orders
                   dbt 1.10+ because '1.10.0' < '1.5.0' lexicographically. #}
                {%- set dbt_version_parts = dbt_version.split('.') -%}
                {%- set refs_have_name_attr = dbt_version_parts[0] | int > 1
                    or (dbt_version_parts[0] | int == 1 and dbt_version_parts[1] | int >= 5) -%}
                {% set refs = [] %}
                {% for ref in node.refs %}
                    {%- if refs_have_name_attr -%}
                        {%- do refs.append(ref.name) -%}
                    {%- else -%}
                        {# NOTE(review): for two-argument refs on dbt < 1.5 the
                           entry is [package, name], so ref[0] may be the
                           package rather than the ref name -- kept as-is to
                           preserve existing behavior; confirm if pre-1.5
                           support still matters. #}
                        {%- do refs.append(ref[0]) -%}
                    {%- endif -%}
                {% endfor %}
                {%- do comment_dict.update(
                    node_refs=refs | unique | list
                ) -%}
            {%- endif -%}
        {%- endif -%}
        {%- if node.resource_type == 'model' -%}
            {%- do comment_dict.update(
                materialized=node.config.materialized,
            ) -%}
        {%- endif -%}
    {%- endif -%}

    {# Only populated when running on dbt Cloud. (Fixed: these section
       markers were literal "---"/"--" text inside the macro body; they are
       now Jinja comments so they can never leak into rendered output.) #}
    {%- if env_var('DBT_CLOUD_PROJECT_ID', False) -%}
    {%- do comment_dict.update(
        dbt_cloud_project_id=env_var('DBT_CLOUD_PROJECT_ID')
    ) -%}
    {%- endif -%}

    {%- if env_var('DBT_CLOUD_JOB_ID', False) -%}
    {%- do comment_dict.update(
        dbt_cloud_job_id=env_var('DBT_CLOUD_JOB_ID')
    ) -%}
    {%- endif -%}

    {%- if env_var('DBT_CLOUD_RUN_ID', False) -%}
    {%- do comment_dict.update(
        dbt_cloud_run_id=env_var('DBT_CLOUD_RUN_ID')
    ) -%}
    {%- endif -%}

    {%- if env_var('DBT_CLOUD_RUN_REASON_CATEGORY', False) -%}
    {%- do comment_dict.update(
        dbt_cloud_run_reason_category=env_var('DBT_CLOUD_RUN_REASON_CATEGORY')
    ) -%}
    {%- endif -%}

    {%- if env_var('DBT_CLOUD_RUN_REASON', False) -%}
    {%- do comment_dict.update(
        dbt_cloud_run_reason=env_var('DBT_CLOUD_RUN_REASON')
    ) -%}
    {%- endif -%}

    {# Only populated when running on the Paradime Bolt scheduler. #}
    {%- if env_var('PARADIME_SCHEDULE_NAME', False) -%}
    {%- do comment_dict.update(
        paradime_schedule_name=env_var('PARADIME_SCHEDULE_NAME')
    ) -%}
    {%- endif -%}

    {%- if env_var('PARADIME_SCHEDULE_RUN_ID', False) -%}
    {%- do comment_dict.update(
        paradime_schedule_run_id=env_var('PARADIME_SCHEDULE_RUN_ID')
    ) -%}
    {%- endif -%}

    {%- if env_var('PARADIME_SCHEDULE_RUN_START_DTTM', False) -%}
    {%- do comment_dict.update(
        paradime_schedule_run_start_dttm=env_var('PARADIME_SCHEDULE_RUN_START_DTTM')
    ) -%}
    {%- endif -%}

    {%- if env_var('PARADIME_SCHEDULE_TRIGGER', False) -%}
    {%- do comment_dict.update(
        paradime_schedule_trigger=env_var('PARADIME_SCHEDULE_TRIGGER')
    ) -%}
    {%- endif -%}

    {%- if env_var('PARADIME_SCHEDULE_GIT_SHA', False) -%}
    {%- do comment_dict.update(
        paradime_schedule_git_sha=env_var('PARADIME_SCHEDULE_GIT_SHA')
    ) -%}
    {%- endif -%}

    {{ return(tojson(comment_dict)) }}
{% endmacro %}

Update your dbt_project.yml file

This step ensures that with each dbt™️ run, the query comment is appended to the query running in Snowflake.

dbt_project.yml
# Attach Paradime's JSON metadata comment to every query dbt issues.
query-comment:
  comment: '{{ get_query_comment(node) }}'
  append: true  # place the comment at the end of the query instead of the top

3. Configure the Cost connection in Paradime

From the account settings page of your Paradime workspace, add your new Snowflake Cost connection using the credentials below:

  • Your Snowflake Account Identifier

  • The Paradime Database created during the Snowflake Setup. This is named as: PARADIME_COST_ANALYTICS

  • The Paradime cost username and password: the username is paradime_cost_user, and the password is the one you generated during the setup.

Last updated