from datetime import datetime

from airflow import DAG
from airflow.operators.sql import SQLValueCheckOperator

default_args = {
    "start_date": datetime(2020, 1, 1),
    "owner": "airflow",
}

with DAG(
    dag_id="Sprin4_Task1",
    schedule_interval="@daily",
    default_args=default_args,
    catchup=False,
) as dag:

    # Check that user_order_log contains exactly 3 distinct customers
    # (within a 1% tolerance).
    order_log_value_check = SQLValueCheckOperator(
        task_id="user_order_log_distinct_customer_check",
        conn_id="postgres_default",  # assumes a configured Postgres connection
        sql="SELECT COUNT(DISTINCT customer_id) FROM user_order_log",
        pass_value=3,
        tolerance=0.01,
    )

    # Same distinct-customer check against user_activity_log.
    activity_log_value_check = SQLValueCheckOperator(
        task_id="user_activity_log_distinct_customer_check",
        conn_id="postgres_default",  # assumes a configured Postgres connection
        sql="SELECT COUNT(DISTINCT customer_id) FROM user_activity_log",
        pass_value=3,
        tolerance=0.01,
    )

    order_log_value_check >> activity_log_value_check
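
The original paste also imports PostgresHook, which suggests the same count can be run by hand against the warehouse outside the scheduler. A minimal sketch, assuming a Postgres connection registered under the id "postgres_default" (an assumption, not part of the paste) and the same two tables:

from airflow.providers.postgres.hooks.postgres import PostgresHook

def count_distinct_customers(table: str) -> int:
    """Run the same distinct-customer count that the DAG's value checks use."""
    # "postgres_default" is an assumed connection id; replace with your own.
    hook = PostgresHook(postgres_conn_id="postgres_default")
    row = hook.get_first(f"SELECT COUNT(DISTINCT customer_id) FROM {table}")
    return row[0]

if __name__ == "__main__":
    # Quick manual sanity check of the values the DAG compares against pass_value=3.
    print(count_distinct_customers("user_order_log"))
    print(count_distinct_customers("user_activity_log"))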