# NOTE(review): stray "Newer"/"Older" lines removed here — they were page-navigation
# artifacts from wherever this file was copied, not code (bare names would raise NameError).
#!/usr/bin/env python3
# This script clones all entries in the database (each run doubles the WEB_URL table).
# I used this to show what happens with extreme database sizes.
# NOTE(review): an earlier comment claimed the output is Excel-style CSV (";" as column
# separator); that is stale — this script writes no CSV, it only re-inserts rows.
import sqlite3
from tqdm import tqdm
# Schema for the URL table. Executing this against a database that already
# holds data raises sqlite3.OperationalError ("table WEB_URL already exists"),
# which is how clone_entries() detects whether there is anything to clone.
CREATE_TABLE = """
CREATE TABLE WEB_URL(
ID INTEGER PRIMARY KEY AUTOINCREMENT,
LONG_URL TEXT NOT NULL, SHORT_URL TEXT NOT NULL
);
"""

try:
    # tqdm is only a progress bar here; fall back to a plain pass-through
    # iterator when the package is not installed.
    from tqdm import tqdm
except ImportError:
    def tqdm(iterable, **kwargs):
        return iterable


def clone_entries(db_path='db/urls.db'):
    """Duplicate every (LONG_URL, SHORT_URL) row of the WEB_URL table in place.

    Each call doubles the row count — useful for stress-testing with
    extreme database sizes.

    Parameters:
        db_path: path to the SQLite database file (default: 'db/urls.db').

    Raises:
        SystemExit: if the WEB_URL table did not exist yet (the CREATE TABLE
            just succeeded, so there is no data to clone).
    """
    # A single connection is enough for both the read and the writes; the
    # original opened three. `with` commits on success / rolls back on error.
    with sqlite3.connect(db_path) as conn:
        cursor = conn.cursor()
        try:
            # If this succeeds the table was just created => no data to clone.
            cursor.execute(CREATE_TABLE)
            raise SystemExit("Can't find the WEB_URL table ==> no data to clone")
        except sqlite3.OperationalError:
            pass  # table already exists — proceed with cloning
        # fetchall() snapshots the existing rows up front, so the INSERTs
        # below cannot feed back into the iteration.
        rows = cursor.execute(
            'SELECT LONG_URL, SHORT_URL FROM WEB_URL'
        ).fetchall()
        for long_url, short_url in tqdm(rows):
            cursor.execute(
                'INSERT INTO WEB_URL (LONG_URL, SHORT_URL) VALUES (?, ?)',
                (long_url, short_url),
            )


if __name__ == '__main__':
    clone_entries()