- changed status to wontfix
Weird insert crash
Issue #2429
resolved
I'm trying to convert an xml to sqlite and get a weird error:
Traceback (most recent call last):
File "C:\Temp\xxx\scripts\xml_to_db.py", line 212, in <module>
win = Test(a)
File "C:\Temp\xxx\scripts\xml_to_db.py", line 20, in __init__
self.testdb()
File "C:\Temp\xxx\scripts\xml_to_db.py", line 133, in testdb
row = connection.execute(t_id)
File "C:\Temp\PortablePython\App\lib\site-packages\sqlalchemy\engine\base.py", line 1405, in execute
params)
File "C:\Temp\PortablePython\App\lib\site-packages\sqlalchemy\engine\base.py", line 1538, in _execute_clause
element
compiled_sql, distilled_params
File "C:\Temp\PortablePython\App\lib\site-packages\sqlalchemy\engine\base.py", line 1646, in _execute_context
context)
File "C:\Temp\PortablePython\App\lib\site-packages\sqlalchemy\engine\base.py", line 1639, in _execute_context
context)
File "C:\Temp\PortablePython\App\lib\site-packages\sqlalchemy\engine\default.py", line 330, in do_execute
cursor.execute(statement, parameters)
InterfaceError: (InterfaceError) Error binding parameter 0 - probably unsupported type. u'SELECT temperatures.id \nFROM temperatures \nWHERE temperatures.temperature = ?' ('50',)
where line 130 corresponds to
t_id = select([tb_temperatures.c.id](tb_temperatures.c.id), tb_temperatures.c.temperature == temp)
Here's the whole code:
def testdb(self):
db_file = "C:/Temp/xxx/data/xxx/db.sqlite"
file=QFile(db_file)
if file.exists():
file.remove()
db = create_engine('sqlite:///' + db_file)
connection = db.connect()
metadata = MetaData()
tb_materials = Table('materials', metadata,
Column('id', Integer, primary_key=True),
Column('material', String)
)
tb_temperatures = Table('temperatures', metadata,
Column('id', Integer, primary_key=True),
Column('temperature', String)
)
tb_mat_data = Table('mat_data', metadata,
Column('id', Integer, primary_key=True),
Column('mat_id', None, ForeignKey('materials.id', onupdate="CASCADE", ondelete="CASCADE")),
Column('temp_id', None, ForeignKey('temperatures.id', onupdate="CASCADE", ondelete="CASCADE")),
Column('density', String),
Column('elasticity', String),
Column('stress', String)
)
auto_assign(metadata, db)
metadata.create_all(db)
for m in [316']('SS):
data = tb_materials.insert().values(material = m)
connection.execute(data)
for t in ['38', '50', '150']('25',):
data = tb_temperatures.insert().values(temperature = t)
connection.execute(data)
materials = [316']('SS)
for material in materials:
m_id = select([tb_materials.c.id](tb_materials.c.id), tb_materials.c.material == material)
row = connection.execute(m_id)
data = row.fetchone()
m_id_key = data[0](0)
temps = ['38', '50', '150']('25',)
for temp in temps:
QMessageBox.about(self,"",temp)
t_id = select([tb_temperatures.c.id](tb_temperatures.c.id), tb_temperatures.c.temperature == temp)
row = connection.execute(t_id)
data = row.fetchone()
t_id_key = data[0](0)
z = tb_mat_data.insert().values(mat_id = m_id_key, temp_id = t_id_key)
connection.execute(z)
connection.close()
quit()
which crashes at the 3rd value of temp = '50'.
I'm totally puzzled and can't find what's wrong with it.
Comments (1)
-
repo owner - Log in to comment
seems like sqlite is running out of cursors since you're leaving them open. This is not a behavior I'm familiar with as far as pysqlite but that's what it seems to be. You can see this more clearly like this:
so instead of doing those fetchones and leaving it open, use first() which auto-closes before returning just the first row: