diff --git a/.gitignore b/.gitignore
index adc0bd9dfcdfd367fca2b1af9bf5f7dfb0ac1ce9..5494ff540e47fc83b56da316d3b8a61048643254 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,4 @@ import.csv
 __pycache__/
 
 builddate.txt
+db/urls.db-journal
diff --git a/clonedb.py b/clonedb.py
new file mode 100644
index 0000000000000000000000000000000000000000..09c1196e204db422f3e34341093f67d47c12a115
--- /dev/null
+++ b/clonedb.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+# This script duplicates every entry in the database by re-inserting each row
+# once. I used it to show what happens with extreme database sizes.
+
+import sqlite3
+from tqdm import tqdm
+
+create_table = """
+    CREATE TABLE WEB_URL(
+    ID INTEGER PRIMARY KEY AUTOINCREMENT,
+    LONG_URL TEXT NOT NULL, SHORT_URL TEXT NOT NULL
+    );
+    """
+with sqlite3.connect('db/urls.db') as conn:
+    cursor = conn.cursor()
+    try:
+        # Try to create the table. If that succeeds, the table did not exist
+        # before, so there cannot be any data to duplicate.
+        cursor.execute(create_table)
+        raise SystemExit("No data found: the WEB_URL table did not exist, so there is nothing to duplicate.")
+    except sqlite3.OperationalError:
+        pass  # Table already exists; proceed with the duplication.
+
+conn = sqlite3.connect('db/urls.db')
+cursor = conn.cursor()
+# fetchall() materialises the whole result set before the first insert runs,
+# so the copy cannot feed on its own output.
+rows = cursor.execute('SELECT LONG_URL, SHORT_URL FROM WEB_URL').fetchall()
+for long_url, short_url in tqdm(rows):
+    cursor.execute(  # Re-insert each row once, doubling the table.
+        'INSERT INTO WEB_URL (LONG_URL, SHORT_URL) VALUES (?, ?)',
+        (long_url, short_url)
+    )
+conn.commit()
+conn.close()
+print("Database duplicated")
\ No newline at end of file
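
A quick way to sanity-check a run is to compare row counts before and after:
the count should exactly double. A minimal sketch, assuming the same
db/urls.db path and WEB_URL table used above:

    import sqlite3
    import subprocess

    def count_rows():
        # Open a fresh connection so we always see the latest committed state.
        conn = sqlite3.connect('db/urls.db')
        try:
            return conn.execute('SELECT COUNT(*) FROM WEB_URL').fetchone()[0]
        finally:
            conn.close()

    before = count_rows()
    subprocess.run(['python3', 'clonedb.py'], check=True)
    assert count_rows() == 2 * before, "row count did not double"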
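
If the row-by-row loop becomes a bottleneck at the extreme sizes this script
targets, batching the inserts with executemany is a possible speed-up, at the
cost of the per-row tqdm progress bar. A sketch under the same assumptions:

    import sqlite3

    conn = sqlite3.connect('db/urls.db')
    # Read everything first, then hand the whole batch to SQLite in one call.
    rows = conn.execute('SELECT LONG_URL, SHORT_URL FROM WEB_URL').fetchall()
    conn.executemany(
        'INSERT INTO WEB_URL (LONG_URL, SHORT_URL) VALUES (?, ?)', rows
    )
    conn.commit()
    conn.close()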