Adds support for serializing dicts as JSON
There are likely vendor-specific enhancements (such as `JSONB` in PostgreSQL), but the current type-guessing code doesn't know anything about the underlying database.
parent d51fcb604f
commit aeaab50043
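For context, here is a minimal sketch of the behaviour the test below exercises, written against the `dataset`-style API visible in the diff (`insert`, `find_one`). The `dataset.connect()` call, the database URL, and the table and column names are assumptions for illustration only, and it presumes a backend whose SQLAlchemy dialect supports the generic `JSON` type (recent SQLite does):

```python
import dataset

# Illustrative only: in-memory SQLite database and a made-up table name.
db = dataset.connect('sqlite:///:memory:')
table = db['weather']

# The nested dict triggers the new type guess: the 'info' column is
# created with SQLAlchemy's generic JSON type instead of falling back
# to a text column.
row_id = table.insert({
    'place': 'Berlin',
    'temperature': -10,
    'info': {'currency': 'EUR', 'language': 'German'},
})

row = table.find_one(id=row_id)
# Whether 'info' comes back as a dict depends on the dialect's JSON
# deserialization; on SQLite with a recent SQLAlchemy it does.
print(row['place'], row['info'])
```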
@@ -1,7 +1,7 @@
 from datetime import datetime, date
 
 from sqlalchemy import Integer, UnicodeText, Float, BigInteger
-from sqlalchemy import Boolean, Date, DateTime, Unicode
+from sqlalchemy import Boolean, Date, DateTime, Unicode, JSON
 from sqlalchemy.types import TypeEngine
 
 
@@ -15,6 +15,7 @@ class Types(object):
     boolean = Boolean
     date = Date
     datetime = DateTime
+    json = JSON
 
     def guess(self, sample):
         """Given a single sample, guess the column type for the field.
@@ -34,4 +35,6 @@ class Types(object):
             return self.datetime
         elif isinstance(sample, date):
             return self.date
+        elif isinstance(sample, dict):
+            return self.json
         return self.text
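Taken on its own, the extended `guess()` method behaves roughly as below. The `dataset.types` import path is an assumption based on the hunk context; the class and attribute names come from the diff:

```python
# Sketch of the guessing behaviour added above; the import path is an
# assumption, not confirmed by the diff.
from dataset.types import Types

types = Types()

# A dict sample now maps to the generic SQLAlchemy JSON type ...
assert types.guess({'currency': 'EUR'}) is types.json

# ... while anything unrecognised still falls through to the text type.
assert types.guess('Berlin') is types.text
```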
@@ -192,6 +192,20 @@ class TableTestCase(unittest.TestCase):
         )
         assert len(self.tbl) == len(TEST_DATA) + 1, len(self.tbl)
 
+    def test_insert_json(self):
+        last_id = self.tbl.insert({
+            'date': datetime(2011, 1, 2),
+            'temperature': -10,
+            'place': 'Berlin',
+            'info': {
+                'currency': 'EUR',
+                'language': 'German',
+                'population': 3292365
+            }
+        })
+        assert len(self.tbl) == len(TEST_DATA) + 1, len(self.tbl)
+        assert self.tbl.find_one(id=last_id)['place'] == 'Berlin'
+
     def test_upsert(self):
         self.tbl.upsert({
             'date': datetime(2011, 1, 2),
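The caveat in the description about vendor-specific types comes down to the distinction sketched below: in plain SQLAlchemy, the backend-neutral `JSON` type the guesser now returns is separate from PostgreSQL's `JSONB`, which has to be requested explicitly. The table and column names here are purely illustrative:

```python
# Backend-neutral type: what the guesser now returns for dict samples.
from sqlalchemy import JSON

# PostgreSQL-specific binary JSON; nothing in the type guesser selects
# this automatically, since it knows nothing about the target database.
from sqlalchemy.dialects.postgresql import JSONB

from sqlalchemy import Column, Integer, MetaData, Table

metadata = MetaData()
cities = Table(
    'cities', metadata,
    Column('id', Integer, primary_key=True),
    Column('info_generic', JSON),   # works on any dialect with JSON support
    Column('info_binary', JSONB),   # PostgreSQL only
)
```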