Added FK support for SQLite (#78)
* Added FK support for SQLite

* Fixed tests
roll authored Aug 6, 2019
1 parent 8129325 commit 90d1007
Showing 2 changed files with 33 additions and 22 deletions.
4 changes: 2 additions & 2 deletions tableschema_sql/mapper.py
@@ -77,7 +77,7 @@ def convert_descriptor(self, bucket, descriptor, index_fields=[], autoincrement=
constraints.append(constraint)

# Foreign keys
-if self.__dialect == 'postgresql':
+if self.__dialect in ['postgresql', 'sqlite']:
fks = descriptor.get('foreignKeys', [])
for fk in fks:
fields = fk['fields']
@@ -189,7 +189,7 @@ def restore_descriptor(self, table_name, columns, constraints, autoincrement_col

# Foreign keys
fks = []
-if self.__dialect == 'postgresql':
+if self.__dialect in ['postgresql', 'sqlite']:
for constraint in constraints:
if isinstance(constraint, sa.ForeignKeyConstraint):
resource = ''
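
The mapper change above only widens the dialect check; the foreign-key construction itself already lives in convert_descriptor and restore_descriptor. For orientation, here is a minimal sketch of that mapping, assuming a hypothetical standalone helper (build_fk_constraints, the prefix argument, and the exact reference handling are illustrative, not the library's code): a Table Schema foreignKeys entry becomes a SQLAlchemy ForeignKeyConstraint whenever the dialect is 'postgresql' or 'sqlite'.

import sqlalchemy as sa

def build_fk_constraints(descriptor, table_name, dialect, prefix=''):
    # Sketch only: collect ForeignKeyConstraint objects for dialects that support them.
    constraints = []
    if dialect in ['postgresql', 'sqlite']:
        for fk in descriptor.get('foreignKeys', []):
            fields = fk['fields']
            resource = fk['reference']['resource']
            foreign_fields = fk['reference']['fields']
            if isinstance(fields, str):
                fields = [fields]
            if isinstance(foreign_fields, str):
                foreign_fields = [foreign_fields]
            # Per the Table Schema spec, an empty reference.resource means a
            # self-reference; otherwise the resource maps to a (prefixed) table name.
            foreign_table = table_name if resource == '' else prefix + resource
            refcolumns = ['%s.%s' % (foreign_table, field) for field in foreign_fields]
            constraints.append(sa.ForeignKeyConstraint(fields, refcolumns))
    return constraints

Constraints built this way would be passed to sa.Table alongside the column definitions, which is why restore_descriptor can later recover the foreignKeys part of the descriptor by filtering the table's constraints for sa.ForeignKeyConstraint instances.
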
51 changes: 31 additions & 20 deletions tests/test_storage.py
@@ -99,10 +99,14 @@

# Tests

-def test_storage():
+@pytest.mark.parametrize('dialect, database_url', [
+    ('postgresql', os.environ['POSTGRES_URL']),
+    ('sqlite', os.environ['SQLITE_URL']),
+])
+def test_storage(dialect, database_url):

# Create storage
-engine = create_engine(os.environ['POSTGRES_URL'])
+engine = create_engine(database_url)
storage = Storage(engine=engine, prefix='test_storage_')

# Delete buckets
@@ -159,25 +163,27 @@ def test_storage():
{'name': 'yearmonth', 'type': 'string'}, # type fallback
],
}
-assert storage.describe('location') == {
-    'fields': [
-        {'name': 'location', 'type': 'object'}, # type downgrade
-        {'name': 'geopoint', 'type': 'string'}, # type fallback
-    ],
-}
-assert storage.describe('compound') == {
-    'fields': [
-        {'name': 'stats', 'type': 'object'},
-        {'name': 'persons', 'type': 'object'}, # type downgrade
-    ],
-}
+if dialect != 'sqlite':
+    assert storage.describe('location') == {
+        'fields': [
+            {'name': 'location', 'type': 'object'}, # type downgrade
+            {'name': 'geopoint', 'type': 'string'}, # type fallback
+        ],
+    }
+    assert storage.describe('compound') == {
+        'fields': [
+            {'name': 'stats', 'type': 'object'},
+            {'name': 'persons', 'type': 'object'}, # type downgrade
+        ],
+    }

# Assert data
assert storage.read('articles') == cast(ARTICLES)['data']
assert storage.read('comments') == cast(COMMENTS)['data']
assert storage.read('temporal') == cast(TEMPORAL, skip=['duration', 'yearmonth'])['data']
-assert storage.read('location') == cast(LOCATION, skip=['geopoint'])['data']
-assert storage.read('compound') == cast(COMPOUND)['data']
+if dialect != 'sqlite':
+    assert storage.read('location') == cast(LOCATION, skip=['geopoint'])['data']
+    assert storage.read('compound') == cast(COMPOUND)['data']

# Assert data with forced schema
storage.describe('compound', COMPOUND['schema'])
@@ -192,8 +198,8 @@


@pytest.mark.parametrize('dialect, database_url', [
-('sqlite', os.environ['SQLITE_URL']),
('mysql', os.environ['MYSQL_URL']),
+('sqlite', os.environ['SQLITE_URL']),
])
def test_storage_limited_databases(dialect, database_url):

@@ -207,9 +213,9 @@ def test_storage_limited_databases(dialect, database_url):
# Create buckets
storage.create(
['articles', 'comments'],
-[ARTICLES['schema'], COMMENTS['schema']],
+[remove_fk(ARTICLES['schema']), remove_fk(COMMENTS['schema'])],
indexes_fields=[[['rating'], ['name']], []])
-storage.create('comments', COMMENTS['schema'], force=True)
+storage.create('comments', remove_fk(COMMENTS['schema']), force=True)
storage.create('temporal', TEMPORAL['schema'])
storage.create('location', LOCATION['schema'])
storage.create('compound', COMPOUND['schema'])
@@ -302,7 +308,7 @@ def test_storage_write_generator():
storage = Storage(engine=engine, prefix='test_storage_')

# Create bucket
-storage.create('comments', COMMENTS['schema'], force=True)
+storage.create('comments', remove_fk(COMMENTS['schema']), force=True)

# Write data using generator
gen = storage.write('comments', COMMENTS['data'], as_generator=True)
@@ -499,3 +505,8 @@ def cast(resource, skip=[]):
if field.type not in skip:
row[index] = field.cast_value(row[index])
return resource

+def remove_fk(schema):
+    schema = deepcopy(schema)
+    del schema['foreignKeys']
+    return schema
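
A note for anyone running the SQLite leg of these tests (background, not part of this commit): SQLite records foreign key clauses in the schema but only enforces them on connections where the foreign_keys pragma is switched on. With SQLAlchemy the usual pattern is a connect-event listener, for example:

from sqlalchemy import create_engine, event

# Hypothetical local database URL, shown only for illustration.
engine = create_engine('sqlite:///test.db')

@event.listens_for(engine, 'connect')
def enable_sqlite_fks(dbapi_connection, connection_record):
    # Turn on foreign key enforcement for each new SQLite connection.
    cursor = dbapi_connection.cursor()
    cursor.execute('PRAGMA foreign_keys=ON')
    cursor.close()

Without the pragma, the new FK constraints are still created and reflected correctly (which is what the parametrized test above exercises), but violations are not rejected at runtime.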
