Commit 79a2b8cd authored by Jonas Leder

added clonedb script to test db performance

parent 44db46a2
@@ -8,3 +8,4 @@ import.csv
__pycache__/
builddate.txt
db/urls.db-journal
#!/usr/bin/env python3
# This script clones every entry in the database. I used it to show what happens with extreme database sizes.
# Each run doubles the number of rows in WEB_URL.
import sqlite3
from tqdm import tqdm

create_table = """
CREATE TABLE WEB_URL(
    ID INTEGER PRIMARY KEY AUTOINCREMENT,
    LONG_URL TEXT NOT NULL, SHORT_URL TEXT NOT NULL
);
"""

with sqlite3.connect('db/urls.db') as conn:
    cursor = conn.cursor()
    try:  # Try to create the table; if that succeeds the database was empty, so there is no data to clone.
        cursor.execute(create_table)
        raise Exception('No database found', "Can't find the database ==> no data to clone")
    except sqlite3.OperationalError:
        pass

conn = sqlite3.connect('db/urls.db')
cursor = conn.cursor()
res = cursor.execute('SELECT LONG_URL, SHORT_URL FROM WEB_URL WHERE 1')  # read all existing rows
with sqlite3.connect('db/urls.db') as conn2:
    cursor2 = conn2.cursor()
    for entries in tqdm(res.fetchall()):
        cursor2.execute(  # insert a copy of each row
            'INSERT INTO WEB_URL (LONG_URL, SHORT_URL) VALUES (?, ?)',
            [entries[0], entries[1]]
        )
print("Database duplicated")
\ No newline at end of file
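
Not part of the commit, but for context: each run of the script doubles the row count of WEB_URL, which is how the database is grown for the performance test. A minimal sketch for checking the effect of a run, assuming the same db/urls.db path and WEB_URL table used above:

#!/usr/bin/env python3
# Sketch: print the current size of the WEB_URL table (run before and after cloning).
import sqlite3

with sqlite3.connect('db/urls.db') as conn:
    (count,) = conn.execute('SELECT COUNT(*) FROM WEB_URL').fetchone()
    print(f"WEB_URL currently holds {count} rows")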