Merge pull request #128 from implydata/revert-123-test-gha
Revert "Add github action for testing notebooks"
petermarshallio authored Oct 31, 2024
2 parents e707b66 + 3964644 commit 8b86086
Showing 10 changed files with 9 additions and 154 deletions.
54 changes: 0 additions & 54 deletions .github/workflows/actions.yml

This file was deleted.

1 change: 0 additions & 1 deletion .gitignore
@@ -2,4 +2,3 @@
 .ipynb_checkpoints
 .ipynb_checkpoints/*
 .DS_Store
-output.ipynb
4 changes: 2 additions & 2 deletions notebooks/02-ingestion/02-batch-ingestion.ipynb
@@ -588,7 +588,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"druid.datasources.drop(\"example-kttm-transform-batch\", True)"
+"druid.datasources.drop(table_name, True)"
 ]
 },
 {
@@ -626,7 +626,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.12.7"
+"version": "3.11.6"
 }
 },
 "nbformat": 4,
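
For readers following along: the cell changed above drops the example datasource by name at the end of the notebook. A minimal sketch of that pattern, assuming the druidapi.jupyter_client connection boilerplate used elsewhere in these notebooks; the hard-coded host and the table_name value are illustrative, not part of this commit:

    import druidapi

    # Connect to the Druid router (the quickstart default used in these notebooks).
    druid = druidapi.jupyter_client("http://localhost:8888")

    # Name of the datasource created earlier in the notebook (taken from the
    # removed line above; treat it as a placeholder).
    table_name = "example-kttm-transform-batch"

    # Drop the datasource; the second argument is passed exactly as in the cell
    # above (an if-exists style switch, per its usage in these notebooks).
    druid.datasources.drop(table_name, True)
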
7 changes: 2 additions & 5 deletions notebooks/02-ingestion/12-spatial-dimensions.ipynb
@@ -73,7 +73,6 @@
 "source": [
 "import druidapi\n",
 "import os\n",
-"import json\n",
 "\n",
 "if 'DRUID_HOST' not in os.environ.keys():\n",
 "    druid_host=f\"http://localhost:8888\"\n",
@@ -236,6 +235,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
+"import json\n",
+"\n",
 "spatial_index_spec = {\n",
 "  \"type\": \"index_parallel\",\n",
 "  \"spec\": {\n",
@@ -364,10 +365,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import time\n",
-"\n",
-"time.sleep(100) # Give previous cell some time to complete\n",
-"\n",
 "rectangular_filter_query = {\n",
 "  \"queryType\": \"topN\",\n",
 "  \"dataSource\": {\n",
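
The first hunk above trims this notebook's connection cell. For context, a sketch of that boilerplate as it appears across the learn-druid notebooks; the DRUID_HOST fallback and the exact druidapi.jupyter_client call are assumptions extrapolated from the lines shown here, not the full cell:

    import os

    import druidapi

    # Use an externally supplied router address when available, otherwise fall
    # back to the local quickstart default shown in the hunk above.
    if 'DRUID_HOST' not in os.environ.keys():
        druid_host = "http://localhost:8888"
    else:
        druid_host = f"http://{os.environ['DRUID_HOST']}:8888"

    # Client objects used by later cells (display.sql, ingestion specs, and so on).
    druid = druidapi.jupyter_client(druid_host)
    display = druid.display
    sql_client = druid.sql
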
3 changes: 0 additions & 3 deletions notebooks/02-ingestion/13-native-transforms.ipynb
@@ -767,9 +767,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import time\n",
-"time.sleep(100) # Give previous cell some time to complete\n",
-"\n",
 "time_now = datetime.now().strftime('%Y-%m-%dT%H:%M:%S')\n",
 "\n",
 "sql=f'''\n",
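
Several hunks in this commit drop a time.sleep(100) guard that gave an ingestion cell time to finish before the next query ran. A deterministic alternative seen in other learn-druid notebooks is to poll until the datasource is queryable; a sketch below, treating the run_task and wait_until_ready helpers (and their signatures) as assumptions about the druidapi client rather than confirmed API:

    # `sql` holds a SQL-based ingestion statement built earlier in the notebook;
    # the datasource name here is hypothetical.
    table_name = 'example-koalas-transform'

    sql_client.run_task(sql)                 # submit the ingestion task
    sql_client.wait_until_ready(table_name)  # block until segments are queryable

    display.sql(f'SELECT COUNT(*) AS "rows" FROM "{table_name}"')
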
7 changes: 2 additions & 5 deletions notebooks/03-query/07-functions-datetime.ipynb
@@ -148,8 +148,7 @@
 "source": [
 "import matplotlib\n",
 "import matplotlib.pyplot as plt\n",
-"import pandas as pd\n",
-"import time"
+"import pandas as pd"
 ]
 },
 {
@@ -437,8 +436,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"time.sleep(100) # Give previous cell some time to complete\n",
-"\n",
 "sql='''\n",
 "SELECT\n",
 "    __time AS \"start\",\n",
@@ -449,7 +446,7 @@
 "LIMIT 10\n",
 "'''\n",
 "\n",
-"display.sql(sql) "
+"display.sql(sql)"
 ]
 },
 {
7 changes: 2 additions & 5 deletions notebooks/03-query/08-functions-strings.ipynb
@@ -151,8 +151,7 @@
 "source": [
 "import matplotlib\n",
 "import matplotlib.pyplot as plt\n",
-"import pandas as pd\n",
-"import time"
+"import pandas as pd"
 ]
 },
 {
@@ -550,8 +549,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"time.sleep(100) # Give previous cell some time to complete\n",
-"\n",
 "sql='''\n",
 "SELECT\n",
 "    TRIM(LEADING 'X' FROM \"XXXXXcountryXXXXX\") AS \"leadingTrim\",\n",
@@ -747,7 +744,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.12.7"
+"version": "3.12.3"
 }
 },
 "nbformat": 4,
3 changes: 1 addition & 2 deletions notebooks/03-query/19-groupby-earliest.ipynb
@@ -141,10 +141,9 @@
 "    \"country\",\n",
 "    \"loaded_image\",\n",
 "    \"os\",\n",
-"    \"session_length\",\n",
 "    LATEST_BY(\"session_length\",TIME_PARSE(\"timestamp\")) \"latest_session_length\"\n",
 "FROM \"ext\"\n",
-"GROUP BY 1,2,3,4,5,6,7,8\n",
+"GROUP BY 1,2,3,4,5,6,7\n",
 "PARTITIONED BY DAY\n",
 "'''\n",
 "\n",
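
The change above removes the bare "session_length" reference from the SELECT list: once that value is produced by LATEST_BY, keeping the raw column as well would force it into the grouping key, so every distinct session_length would form its own group and the rollup would be pointless. The positional GROUP BY therefore shrinks from eight keys to seven to match. A minimal, hypothetical illustration of the rule (column set invented, and not runnable outside the notebook's INSERT ... EXTERN context):

    # Every non-aggregated SELECT column must appear in GROUP BY; the
    # aggregated LATEST_BY value is therefore left out of the positional list.
    sql = '''
    SELECT
      "os",
      "country",
      LATEST_BY("session_length", TIME_PARSE("timestamp")) AS "latest_session_length"
    FROM "ext"
    GROUP BY 1, 2
    '''
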
59 changes: 0 additions & 59 deletions tests/launch-test-environment.sh

This file was deleted.

18 changes: 0 additions & 18 deletions tests/test-notebooks-papermill.sh

This file was deleted.
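
The deleted shell script is not reproduced on this page. As a rough, hypothetical sketch of what a papermill-based notebook check of this kind typically looks like (paths and behaviour invented, not the contents of the removed script), which also explains the output.ipynb entry dropped from .gitignore above:

    import glob

    import papermill as pm

    # Execute each notebook headlessly; papermill writes the executed copy to
    # output.ipynb, the scratch file previously listed in .gitignore.
    for nb in sorted(glob.glob('notebooks/**/*.ipynb', recursive=True)):
        print(f'Running {nb}')
        pm.execute_notebook(nb, 'output.ipynb')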
