Skip to content

Commit

Permalink
Test data frames with pyspark (#135)
Browse files Browse the repository at this point in the history
* Add tags to test_dual_write

* Add the Python 3.5 example.

* Fix up style and imports

* Remove excess newline

* Fix new test
  • Loading branch information
holdenk authored Aug 27, 2024
1 parent 6901d19 commit c1adb45
Show file tree
Hide file tree
Showing 3 changed files with 44 additions and 1 deletion.
4 changes: 4 additions & 0 deletions python/examples/test_dual_write.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import os
import tempfile

# tag::test[]
from sparktestingbase.sqltestcase import SQLTestCase
from pyspark.sql.functions import current_timestamp
from pyspark.sql.types import Row
Expand All @@ -21,3 +22,6 @@ def test_actual_dual_write(self):
df1 = self.sqlCtx.read.format("parquet").load(p1)
df2 = self.sqlCtx.read.format("parquet").load(p2)
self.assertDataFrameEqual(df2.select("times"), df1, 0.1)


# end::test[]
39 changes: 39 additions & 0 deletions python/examples/test_dual_write_new.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import os
import tempfile

# tag::test[]
import unittest
from pyspark.sql import SparkSession
from pyspark.sql.functions import current_timestamp
from pyspark.sql.types import Row
from pyspark.testing.utils import assertDataFrameEqual
from .dual_write import DualWriteExample


class DualWriteTest(unittest.TestCase):
    """Tests for DualWriteExample using the built-in pyspark testing helpers.

    This is the Spark 3.5+ counterpart of the sparktestingbase-based
    test_dual_write.py: it uses ``unittest`` plus
    ``pyspark.testing.utils.assertDataFrameEqual`` instead of SQLTestCase.
    """

    @classmethod
    def setUpClass(cls):
        # One SparkSession shared by all tests in the class — starting a
        # session per test would dominate the suite's runtime.
        cls.spark = SparkSession.builder.appName(
            "Testing PySpark Example"
        ).getOrCreate()

    @classmethod
    def tearDownClass(cls):
        # Release the JVM / executors acquired in setUpClass.
        cls.spark.stop()

    def test_always_passes(self):
        # Smoke test: proves the harness (and setUpClass) ran at all.
        self.assertTrue(True)

    def test_actual_dual_write(self):
        """Write one DataFrame to two parquet paths and compare the results.

        Builds a tiny two-row frame, stamps it with current_timestamp(),
        hands it to DualWriteExample().do_write for the dual write, then
        reloads both outputs and checks they agree on the "times" column.
        """
        tempdir = tempfile.mkdtemp()
        p1 = os.path.join(tempdir, "data1")
        p2 = os.path.join(tempdir, "data2")
        df = self.spark.createDataFrame([Row("timbit"), Row("farted")], ["names"])
        combined = df.withColumn("times", current_timestamp())
        DualWriteExample().do_write(combined, p1, p2)
        df1 = self.spark.read.format("parquet").load(p1)
        df2 = self.spark.read.format("parquet").load(p2)
        # FIX: the third positional parameter of pyspark's
        # assertDataFrameEqual is checkRowOrder (a bool), not a tolerance —
        # passing 0.1 positionally silently enabled row-order checking.
        # In the old sparktestingbase API (see test_dual_write.py) the third
        # positional argument *was* the tolerance, so this was mistranslated
        # during the port; pass it explicitly as rtol instead.
        # NOTE(review): this presumably assumes do_write stores only the
        # "times" column at p1 — confirm against DualWriteExample.
        assertDataFrameEqual(df2.select("times"), df1, rtol=0.1)


# end::test[]
2 changes: 1 addition & 1 deletion python/tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ deps =
[testenv:flake8]
extras = tests
skipsdist = True
commands = flake8 --ignore=F403,E402,F401,F405,W503 examples
commands = flake8 --ignore=F403,E402,F401,F405,W503,E265 examples
allowlist_externals = flake8

[testenv:mypy]
Expand Down

0 comments on commit c1adb45

Please sign in to comment.