Skip to content

Commit

Permalink
start aux snapshots
Browse files Browse the repository at this point in the history
  • Loading branch information
asg017 committed Nov 13, 2024
1 parent 93218e4 commit 163654b
Show file tree
Hide file tree
Showing 3 changed files with 230 additions and 2 deletions.
4 changes: 2 additions & 2 deletions sqlite-vec.c
Original file line number Diff line number Diff line change
Expand Up @@ -3526,7 +3526,7 @@ void vec0_free(vec0_vtab *p) {
}

int vec0_num_defined_user_columns(vec0_vtab *p) {
return p->numVectorColumns + p->numPartitionColumns;
return p->numVectorColumns + p->numPartitionColumns + p->numAuxiliaryColumns;
}

/**
Expand Down Expand Up @@ -4501,7 +4501,7 @@ static int vec0_init(sqlite3 *db, void *pAux, int argc, const char *const *argv,
*pzErr = sqlite3_mprintf(
VEC_CONSTRUCTOR_ERROR
"More than %d auxiliary columns were provided",
VEC0_MAX_PARTITION_COLUMNS);
VEC0_MAX_AUXILIARY_COLUMNS);
goto error;
}
auxColumn.type = cType;
Expand Down
149 changes: 149 additions & 0 deletions tests/__snapshots__/test-auxiliary.ambr
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
# serializer version: 1
# name: test_constructor_limit[max 16 auxiliary columns]
dict({
'error': 'OperationalError',
'message': 'vec0 constructor error: More than 16 auxiliary columns were provided',
})
# ---
# name: test_normal.1
OrderedDict({
'sql': 'select * from v',
'rows': list([
OrderedDict({
'rowid': 1,
'a': b'\x11\x11\x11\x11',
'name': 'alex',
}),
OrderedDict({
'rowid': 2,
'a': b'""""',
'name': 'brian',
}),
OrderedDict({
'rowid': 3,
'a': b'3333',
'name': 'craig',
}),
]),
})
# ---
# name: test_normal.2
dict({
'v_auxiliary': OrderedDict({
'sql': 'select * from v_auxiliary',
'rows': list([
OrderedDict({
'rowid': 1,
'value00': 'alex',
}),
OrderedDict({
'rowid': 2,
'value00': 'brian',
}),
OrderedDict({
'rowid': 3,
'value00': 'craig',
}),
]),
}),
'v_chunks': OrderedDict({
'sql': 'select * from v_chunks',
'rows': list([
OrderedDict({
'chunk_id': 1,
'size': 8,
'validity': b'\x07',
'rowids': b'\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
}),
]),
}),
'v_rowids': OrderedDict({
'sql': 'select * from v_rowids',
'rows': list([
OrderedDict({
'rowid': 1,
'id': None,
'chunk_id': 1,
'chunk_offset': 0,
}),
OrderedDict({
'rowid': 2,
'id': None,
'chunk_id': 1,
'chunk_offset': 1,
}),
OrderedDict({
'rowid': 3,
'id': None,
'chunk_id': 1,
'chunk_offset': 2,
}),
]),
}),
'v_vector_chunks00': OrderedDict({
'sql': 'select * from v_vector_chunks00',
'rows': list([
OrderedDict({
'rowid': 1,
'vectors': b'\x11\x11\x11\x11""""3333\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
}),
]),
}),
})
# ---
# name: test_normal[sqlite_master]
OrderedDict({
'sql': 'select * from sqlite_master order by name',
'rows': list([
OrderedDict({
'type': 'index',
'name': 'sqlite_autoindex_v_vector_chunks00_1',
'tbl_name': 'v_vector_chunks00',
'rootpage': 6,
'sql': None,
}),
OrderedDict({
'type': 'table',
'name': 'sqlite_sequence',
'tbl_name': 'sqlite_sequence',
'rootpage': 3,
'sql': 'CREATE TABLE sqlite_sequence(name,seq)',
}),
OrderedDict({
'type': 'table',
'name': 'v',
'tbl_name': 'v',
'rootpage': 0,
'sql': 'CREATE VIRTUAL TABLE v using vec0(a float[1], +name text, chunk_size=8)',
}),
OrderedDict({
'type': 'table',
'name': 'v_auxiliary',
'tbl_name': 'v_auxiliary',
'rootpage': 7,
'sql': 'CREATE TABLE "v_auxiliary"( rowid integer PRIMARY KEY , value00)',
}),
OrderedDict({
'type': 'table',
'name': 'v_chunks',
'tbl_name': 'v_chunks',
'rootpage': 2,
'sql': 'CREATE TABLE "v_chunks"(chunk_id INTEGER PRIMARY KEY AUTOINCREMENT,size INTEGER NOT NULL,validity BLOB NOT NULL,rowids BLOB NOT NULL)',
}),
OrderedDict({
'type': 'table',
'name': 'v_rowids',
'tbl_name': 'v_rowids',
'rootpage': 4,
'sql': 'CREATE TABLE "v_rowids"(rowid INTEGER PRIMARY KEY AUTOINCREMENT,id,chunk_id INTEGER,chunk_offset INTEGER)',
}),
OrderedDict({
'type': 'table',
'name': 'v_vector_chunks00',
'tbl_name': 'v_vector_chunks00',
'rootpage': 5,
'sql': 'CREATE TABLE "v_vector_chunks00"(rowid PRIMARY KEY,vectors BLOB NOT NULL)',
}),
]),
})
# ---
79 changes: 79 additions & 0 deletions tests/test-auxiliary.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
import sqlite3
from collections import OrderedDict


def test_constructor_limit(db, snapshot):
    """vec0 must reject a table declaring more than 16 auxiliary columns."""
    # 17 auxiliary (+-prefixed) columns: one past the documented limit of 16
    # (VEC0_MAX_AUXILIARY_COLUMNS). The trailing comma after the join is
    # required — without it the final aux declaration would run together
    # with the "v float[1]" vector column and the SQL would be malformed.
    assert exec(
        db,
        f"""
          create virtual table v using vec0(
            {",".join([f"+aux{x} integer" for x in range(17)])},
            v float[1]
          )
        """,
    ) == snapshot(name="max 16 auxiliary columns")


def test_normal(db, snapshot):
    """Happy path: a vec0 table with one vector column and one aux column."""
    db.execute(
        "create virtual table v using vec0(a float[1], +name text, chunk_size=8)"
    )
    # Snapshot the schema (main table + shadow tables) before any inserts.
    assert exec(db, "select * from sqlite_master order by name") == snapshot(
        name="sqlite_master"
    )

    fixtures = [
        (b"\x11\x11\x11\x11", "alex"),
        (b"\x22\x22\x22\x22", "brian"),
        (b"\x33\x33\x33\x33", "craig"),
    ]
    for vector, name in fixtures:
        db.execute("insert into v(a, name) values (?, ?)", [vector, name])

    # Snapshot both the user-visible rows and the raw shadow-table contents.
    assert exec(db, "select * from v") == snapshot()
    assert vec0_shadow_table_contents(db, "v") == snapshot()


def test_types(db, snapshot):
    # TODO: exercise auxiliary columns across supported declared types
    # (text, integer, float, blob) — stub, not yet implemented.
    pass


def test_updates(db, snapshot):
    # TODO: cover UPDATE statements touching auxiliary columns — stub.
    pass


def test_deletes(db, snapshot):
    # TODO: cover DELETE statements and auxiliary-row cleanup — stub.
    pass


def test_knn(db, snapshot):
    # TODO: cover KNN queries that select auxiliary columns — stub.
    pass


def exec(db, sql, parameters=()):
    """Execute *sql* on *db* and return a snapshot-friendly result.

    On success returns OrderedDict({"sql": ..., "rows": [...]}) where each
    row is an OrderedDict keyed by column name (requires db.row_factory =
    sqlite3.Row). On sqlite3 errors returns {"error": <class name>,
    "message": <str>} so expected failures can be snapshotted too.
    """
    # Default is an immutable tuple — a list default would be a shared
    # mutable-default pitfall (same object reused across calls).
    try:
        rows = db.execute(sql, parameters).fetchall()
    except (sqlite3.OperationalError, sqlite3.DatabaseError) as e:
        return {
            "error": e.__class__.__name__,
            "message": str(e),
        }
    result = OrderedDict()
    result["sql"] = sql
    result["rows"] = [
        OrderedDict((k, row[k]) for k in row.keys()) for row in rows
    ]
    return result


def vec0_shadow_table_contents(db, v):
    """Dump every shadow table belonging to the vec0 virtual table *v*.

    Returns a dict mapping each shadow-table name (tables named "{v}_...",
    in sorted order) to the result of select-star'ing it via exec().
    """
    names = db.execute(
        "select name from sqlite_master where name like ? order by 1", [f"{v}_%"]
    ).fetchall()
    return {name: exec(db, f"select * from {name}") for (name,) in names}

0 comments on commit 163654b

Please sign in to comment.