From 79a2b8cd18acf944d8f7e947cfb6ca51daa2300a Mon Sep 17 00:00:00 2001
From: jonasled <jonas@jonasled.de>
Date: Sat, 19 Oct 2019 12:36:55 +0200
Subject: [PATCH] Add clonedb script to test database performance

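Run the script from the repository root with "python3 clonedb.py"; it
needs the tqdm package for its progress bar. Each run re-inserts every
existing row of db/urls.db once, so the row count doubles per run.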
---
 .gitignore |  1 +
 clonedb.py | 34 ++++++++++++++++++++++++++++++++++
 2 files changed, 35 insertions(+)
 create mode 100644 clonedb.py

diff --git a/.gitignore b/.gitignore
index adc0bd9..5494ff5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,4 @@ import.csv
 __pycache__/
 
 builddate.txt
+db/urls.db-journal
diff --git a/clonedb.py b/clonedb.py
new file mode 100644
index 0000000..09c1196
--- /dev/null
+++ b/clonedb.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+# This script duplicates every entry in the database. I used it to show what
+# happens with extreme database sizes: each run doubles the number of rows.
+
+import sqlite3
+from tqdm import tqdm
+
+create_table = """
+    CREATE TABLE WEB_URL(
+    ID INTEGER PRIMARY KEY AUTOINCREMENT,
+    LONG_URL TEXT NOT NULL, SHORT_URL TEXT NOT NULL
+    );
+    """
+with sqlite3.connect('db/urls.db') as conn:
+    cursor = conn.cursor()
+    try: # try to create the table; if that succeeds the database was empty, so abort: there is no data to clone
+        cursor.execute(create_table)
+        raise Exception('No database found', "Can't find the database ==> no data to clone")
+    except sqlite3.OperationalError:
+        pass
+
+conn = sqlite3.connect('db/urls.db')
+cursor = conn.cursor()
+res = cursor.execute('SELECT LONG_URL, SHORT_URL FROM WEB_URL') # read all existing rows from the database
+with sqlite3.connect('db/urls.db') as conn2:
+    cursor2 = conn2.cursor()
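+    # fetchall() loads every selected row into memory before the copies are inserted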
+    for entry in tqdm(res.fetchall()):
+        cursor2.execute( # insert a copy of the row into the same table
+            'INSERT INTO WEB_URL (LONG_URL, SHORT_URL) VALUES (?, ?)',
+            [entry[0], entry[1]]
+        )
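+# leaving the "with" block commits the inserted rows to the database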
+print("Database duplicated")
\ No newline at end of file
-- 
GitLab