diff --git a/docs/devs/extensions.md b/docs/devs/extensions.md
index 98c5d4dc2..0ceb9cb3f 100644
--- a/docs/devs/extensions.md
+++ b/docs/devs/extensions.md
@@ -44,25 +44,8 @@ Dependencies need to be added to `pyproject.toml` and `requirements.txt`, then t
 
 SQLite to PostgreSQL migration
 -----------------------
-LNbits currently supports SQLite and PostgreSQL databases. There is a migration script `tools/conv.py` that helps users migrate from SQLite to PostgreSQL. This script also copies all extension databases to the new backend. Unfortunately, it is not automatic (yet) which is why a new extension **must** add its migration to this script in order for all GitHub checks to pass. It is rather easy to add a migration though, just copy/paste one of the examples and replace the column names with the ones found in your extension `migrations.py`. The next step is to add a mock SQLite database with a few lines of sample data to `tests/data/mock_data.zip`.
-
-### Adding migration to `conv.py`
-
-Here is an example block from the `subdomains` exteion:
-
-```python
-elif schema == "subdomain":
-    # SUBDOMAIN
-    res = sq.execute("SELECT * FROM subdomain;")
-    q = f"""
-    INSERT INTO subdomains.subdomain (id, domain, email, subdomain, ip, wallet, sats, duration, paid, record_type, time)
-    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s::boolean, %s, to_timestamp(%s));
-    """
-    insert_to_pg(q, res.fetchall())
-```
-
-Note how boolean columns must use `%s::boolean` and timestamps use `to_timestamp(%s)`. If your extension uses amounts (like the column `sats` above) it should use a PostgreSQL column of type `int8` or `numeric` (aka `BIGINT`). SQLite doesn't know the difference.
+LNbits currently supports SQLite and PostgreSQL databases. There is a migration script `tools/conv.py` that helps users migrate from SQLite to PostgreSQL. This script also copies all extension databases to the new backend. Because the script now discovers tables and columns automatically, extensions no longer need to add their own migration code to it.
 
 ### Adding mock data to `mock_data.zip`
 
-`mock_data.zip` contains a few lines of sample SQLite data and is used in automated GitHub test to see whether your migration in `conv.py` works. Run your extension and save a few lines of data into a SQLite `your_extension.db` file. Unzip `tests/data/mock_data.zip`, add `your_extension.db` and zip it again. Add the updated `mock_data.zip` to your PR.
\ No newline at end of file
+`mock_data.zip` contains a few lines of sample SQLite data and is used in an automated GitHub test to check whether the `conv.py` migration works for your extension. Run your extension and save a few lines of data into a SQLite `your_extension.sqlite3` file. Unzip `tests/data/mock_data.zip`, add `your_extension.sqlite3`, and zip it again. Add the updated `mock_data.zip` to your PR.
\ No newline at end of file
diff --git a/tools/conv.py b/tools/conv.py
index b93bcfbea..10e8c9cdd 100644
--- a/tools/conv.py
+++ b/tools/conv.py
@@ -1,6 +1,7 @@
 import argparse
 import os
 import sqlite3
+from typing import List
 
 import psycopg2
 from environs import Env  # type: ignore
@@ -110,627 +111,59 @@ def insert_to_pg(query, data):
     connection.close()
 
 
-def migrate_core(sqlite_db_file):
-    sq = get_sqlite_cursor(sqlite_db_file)
+def migrate_core(file: str, exclude_tables: List[str] = []):
+    print(f"Migrating core: {file}")
+    migrate_db(file, "public", exclude_tables)
+    print("✅ Migrated core")
 
-    # ACCOUNTS
-    res = sq.execute("SELECT * FROM accounts;")
-    q = f"INSERT INTO public.accounts (id, email, pass) VALUES (%s, %s, %s);"
-    insert_to_pg(q, res.fetchall())
 
-    # WALLETS
-    res = sq.execute("SELECT * FROM wallets;")
-    q = f'INSERT INTO public.wallets (id, name, "user", adminkey, inkey) VALUES (%s, %s, %s, %s, %s);'
-    insert_to_pg(q, res.fetchall())
+def migrate_ext(file: str):
+    filename = os.path.basename(file)
+    schema = filename.replace("ext_", "").split(".")[0]
+    print(f"Migrating ext: {file}.{schema}")
+    migrate_db(file, schema)
+    print(f"✅ Migrated ext: {schema}")
 
-    # API PAYMENTS
-    res = sq.execute("SELECT * FROM apipayments;")
-    q = f"""
-    INSERT INTO public.apipayments(
-    checking_id, amount, fee, wallet, pending, memo, "time", hash, preimage, bolt11, extra, webhook, webhook_status)
-    VALUES (%s, %s, %s, %s, %s::boolean, %s, to_timestamp(%s), %s, %s, %s, %s, %s, %s);
+
+def migrate_db(file: str, schema: str, exclude_tables: List[str] = []):
+    sq = get_sqlite_cursor(file)
+    tables = sq.execute(
+        """
+        SELECT name FROM sqlite_master
+        WHERE type='table' AND name not like 'sqlite?_%' escape '?'
     """
-    insert_to_pg(q, res.fetchall())
+    ).fetchall()
 
-    # BALANCE CHECK
-    res = sq.execute("SELECT * FROM balance_check;")
-    q = f"INSERT INTO public.balance_check(wallet, service, url) VALUES (%s, %s, %s);"
-    insert_to_pg(q, res.fetchall())
+    for table in tables:
+        tableName = table[0]
+        if tableName in exclude_tables:
+            continue
 
-    # BALANCE NOTIFY
-    res = sq.execute("SELECT * FROM balance_notify;")
-    q = f"INSERT INTO public.balance_notify(wallet, url) VALUES (%s, %s);"
-    insert_to_pg(q, res.fetchall())
+        columns = sq.execute(f"PRAGMA table_info({tableName})").fetchall()
+        q = build_insert_query(schema, tableName, columns)
 
-    # EXTENSIONS
-    res = sq.execute("SELECT * FROM extensions;")
-    q = f'INSERT INTO public.extensions("user", extension, active) VALUES (%s, %s, %s::boolean);'
-    insert_to_pg(q, res.fetchall())
-
-    print("Migrated: core")
-
-
-def migrate_ext(sqlite_db_file, schema, ignore_missing=True):
-
-    # skip this file it has been moved to ext_lnurldevices.sqlite3
-    if sqlite_db_file == "data/ext_lnurlpos.sqlite3":
-        return
-
-    print(f"Migrating {sqlite_db_file}.{schema}")
-    sq = get_sqlite_cursor(sqlite_db_file)
-    if schema == "bleskomat":
-        # BLESKOMAT LNURLS
-        res = sq.execute("SELECT * FROM bleskomat_lnurls;")
-        q = f"""
-        INSERT INTO bleskomat.bleskomat_lnurls(
-        id, bleskomat, wallet, hash, tag, params, api_key_id, initial_uses, remaining_uses, created_time, updated_time)
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-
-        # BLESKOMATS
-        res = sq.execute("SELECT * FROM bleskomats;")
-        q = f"""
-        INSERT INTO bleskomat.bleskomats(
-        id, wallet, api_key_id, api_key_secret, api_key_encoding, name, fiat_currency, exchange_rate_provider, fee)
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "captcha":
-        # CAPTCHA
-        res = sq.execute("SELECT * FROM captchas;")
-        q = f"""
-        INSERT INTO captcha.captchas(
-        id, wallet, url, memo, description, amount, "time", remembers, extras)
-        VALUES (%s, %s, %s, %s, %s, %s, to_timestamp(%s), %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "copilot":
-        # OLD COPILOTS
-        res = sq.execute("SELECT * FROM copilots;")
-        q = f"""
-        INSERT INTO copilot.copilots(
-        id, "user", title, lnurl_toggle, wallet, animation1, animation2, animation3, animation1threshold, animation2threshold, animation3threshold, animation1webhook, animation2webhook, animation3webhook, lnurl_title, show_message, show_ack, show_price, amount_made, fullscreen_cam, iframe_url, "timestamp")
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-
-        # NEW COPILOTS
-        q = f"""
-        INSERT INTO copilot.newer_copilots(
-        id, "user", title, lnurl_toggle, wallet, animation1, animation2, animation3, animation1threshold, animation2threshold, animation3threshold, animation1webhook, animation2webhook, animation3webhook, lnurl_title, show_message, show_ack, show_price, amount_made, fullscreen_cam, iframe_url, "timestamp")
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "events":
-        # EVENTS
-        res = sq.execute("SELECT * FROM events;")
-        q = f"""
-        INSERT INTO events.events(
-        id, wallet, name, info, closing_date, event_start_date, event_end_date, amount_tickets, price_per_ticket, sold, "time")
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-        # EVENT TICKETS
-        res = sq.execute("SELECT * FROM ticket;")
-        q = f"""
-        INSERT INTO events.ticket(
-        id, wallet, event, name, email, registered, paid, "time")
-        VALUES (%s, %s, %s, %s, %s, %s::boolean, %s::boolean, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "example":
-        # Example doesn't have a database at the moment
-        pass
-    elif schema == "hivemind":
-        # Hivemind doesn't have a database at the moment
-        pass
-    elif schema == "jukebox":
-        # JUKEBOXES
-        res = sq.execute("SELECT * FROM jukebox;")
-        q = f"""
-        INSERT INTO jukebox.jukebox(
-        id, "user", title, wallet, inkey, sp_user, sp_secret, sp_access_token, sp_refresh_token, sp_device, sp_playlists, price, profit)
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-        # JUKEBOX PAYMENTS
-        res = sq.execute("SELECT * FROM jukebox_payment;")
-        q = f"""
-        INSERT INTO jukebox.jukebox_payment(
-        payment_hash, juke_id, song_id, paid)
-        VALUES (%s, %s, %s, %s::boolean);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "withdraw":
-        # WITHDRAW LINK
-        res = sq.execute("SELECT * FROM withdraw_link;")
-        q = f"""
-        INSERT INTO withdraw.withdraw_link (
-            id,
-            wallet,
-            title,
-            min_withdrawable,
-            max_withdrawable,
-            uses,
-            wait_time,
-            is_unique,
-            unique_hash,
-            k1,
-            open_time,
-            used,
-            usescsv,
-            webhook_url,
-            custom_url
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-        # WITHDRAW HASH CHECK
-        res = sq.execute("SELECT * FROM hash_check;")
-        q = f"""
-        INSERT INTO withdraw.hash_check (id, lnurl_id)
-        VALUES (%s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "watchonly":
-        # WALLETS
-        res = sq.execute("SELECT * FROM wallets;")
-        q = f"""
-        INSERT INTO watchonly.wallets (
-            id,
-            "user",
-            masterpub,
-            title,
-            address_no,
-            balance,
-            type,
-            fingerprint
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-        # ADDRESSES
-        res = sq.execute("SELECT * FROM addresses;")
-        q = f"""
-        INSERT INTO watchonly.addresses (id, address, wallet, amount, branch_index, address_index, has_activity, note)
-        VALUES (%s, %s, %s, %s, %s, %s, %s::boolean, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-        # CONFIG
-        res = sq.execute("SELECT * FROM config;")
-        q = f"""
-        INSERT INTO watchonly.config ("user", json_data)
-        VALUES (%s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "usermanager":
-        # USERS
-        res = sq.execute("SELECT * FROM users;")
-        q = f"""
-        INSERT INTO usermanager.users (id, name, admin, email, password)
-        VALUES (%s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-        # WALLETS
-        res = sq.execute("SELECT * FROM wallets;")
-        q = f"""
-        INSERT INTO usermanager.wallets (id, admin, name, "user", adminkey, inkey)
-        VALUES (%s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "tpos":
-        # TPOSS
-        res = sq.execute("SELECT * FROM tposs;")
-        q = f"""
-        INSERT INTO tpos.tposs (id, wallet, name, currency, tip_wallet, tip_options)
-        VALUES (%s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "tipjar":
-        # TIPJARS
-        res = sq.execute("SELECT * FROM TipJars;")
-        q = f"""
-        INSERT INTO tipjar.TipJars (id, name, wallet, onchain, webhook)
-        VALUES (%s, %s, %s, %s, %s);
-        """
-        pay_links = res.fetchall()
-        insert_to_pg(q, pay_links)
-        fix_id("tipjar.tipjars_id_seq", pay_links)
-        # TIPS
-        res = sq.execute("SELECT * FROM Tips;")
-        q = f"""
-        INSERT INTO tipjar.Tips (id, wallet, name, message, sats, tipjar)
-        VALUES (%s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "subdomains":
-        # DOMAIN
-        res = sq.execute("SELECT * FROM domain;")
-        q = f"""
-        INSERT INTO subdomains.domain (
-            id,
-            wallet,
-            domain,
-            webhook,
-            cf_token,
-            cf_zone_id,
-            description,
-            cost,
-            amountmade,
-            allowed_record_types,
-            time
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-        # SUBDOMAIN
-        res = sq.execute("SELECT * FROM subdomain;")
-        q = f"""
-        INSERT INTO subdomains.subdomain (
-            id,
-            domain,
-            email,
-            subdomain,
-            ip,
-            wallet,
-            sats,
-            duration,
-            paid,
-            record_type,
-            time
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s::boolean, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "streamalerts":
-        # SERVICES
-        res = sq.execute("SELECT * FROM Services;")
-        q = f"""
-        INSERT INTO streamalerts.Services (
-            id,
-            state,
-            twitchuser,
-            client_id,
-            client_secret,
-            wallet,
-            onchain,
-            servicename,
-            authenticated,
-            token
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s::boolean, %s);
-        """
-        services = res.fetchall()
-        insert_to_pg(q, services)
-        fix_id("streamalerts.services_id_seq", services)
-        # DONATIONS
-        res = sq.execute("SELECT * FROM Donations;")
-        q = f"""
-        INSERT INTO streamalerts.Donations (
-            id,
-            wallet,
-            name,
-            message,
-            cur_code,
-            sats,
-            amount,
-            service,
-            posted,
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s::boolean);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "splitpayments":
-        # TARGETS
-        res = sq.execute("SELECT * FROM targets;")
-        q = f"""
-        INSERT INTO splitpayments.targets (wallet, source, percent, alias)
-        VALUES (%s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "satspay":
-        # CHARGES
-        res = sq.execute("SELECT * FROM charges;")
-        q = f"""
-        INSERT INTO satspay.charges (
-            id,
-            "user",
-            description,
-            onchainwallet,
-            onchainaddress,
-            lnbitswallet,
-            payment_request,
-            payment_hash,
-            webhook,
-            completelink,
-            completelinktext,
-            time,
-            amount,
-            balance,
-            timestamp
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "satsdice":
-        # SATSDICE PAY
-        res = sq.execute("SELECT * FROM satsdice_pay;")
-        q = f"""
-        INSERT INTO satsdice.satsdice_pay (
-            id,
-            wallet,
-            title,
-            min_bet,
-            max_bet,
-            amount,
-            served_meta,
-            served_pr,
-            multiplier,
-            haircut,
-            chance,
-            base_url,
-            open_time
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-        # SATSDICE WITHDRAW
-        res = sq.execute("SELECT * FROM satsdice_withdraw;")
-        q = f"""
-        INSERT INTO satsdice.satsdice_withdraw (
-            id,
-            satsdice_pay,
-            value,
-            unique_hash,
-            k1,
-            open_time,
-            used
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-        # SATSDICE PAYMENT
-        res = sq.execute("SELECT * FROM satsdice_payment;")
-        q = f"""
-        INSERT INTO satsdice.satsdice_payment (
-            payment_hash,
-            satsdice_pay,
-            value,
-            paid,
-            lost
-        )
-        VALUES (%s, %s, %s, %s::boolean, %s::boolean);
-        """
-        insert_to_pg(q, res.fetchall())
-        # SATSDICE HASH CHECK
-        res = sq.execute("SELECT * FROM hash_checkw;")
-        q = f"""
-        INSERT INTO satsdice.hash_checkw (id, lnurl_id)
-        VALUES (%s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "paywall":
-        # PAYWALLS
-        res = sq.execute("SELECT * FROM paywalls;")
-        q = f"""
-        INSERT INTO paywall.paywalls(
-            id,
-            wallet,
-            url,
-            memo,
-            description,
-            amount,
-            time,
-            remembers,
-            extras
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, to_timestamp(%s), %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "offlineshop":
-        # SHOPS
-        res = sq.execute("SELECT * FROM shops;")
-        q = f"""
-        INSERT INTO offlineshop.shops (id, wallet, method, wordlist)
-        VALUES (%s, %s, %s, %s);
-        """
-        shops = res.fetchall()
-        insert_to_pg(q, shops)
-        fix_id("offlineshop.shops_id_seq", shops)
-        # ITEMS
-        res = sq.execute("SELECT * FROM items;")
-        q = f"""
-        INSERT INTO offlineshop.items (shop, id, name, description, image, enabled, price, unit, fiat_base_multiplier)
-        VALUES (%s, %s, %s, %s, %s, %s::boolean, %s, %s, %s);
-        """
-        items = res.fetchall()
-        insert_to_pg(q, items)
-        fix_id("offlineshop.items_id_seq", items)
-    elif schema == "lnurlpos" or schema == "lnurldevice":
-        # lnurldevice
-        res = sq.execute("SELECT * FROM lnurldevices;")
-        q = f"""
-        INSERT INTO lnurldevice.lnurldevices (id, key, title, wallet, currency, device, profit, timestamp)
-        VALUES (%s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-        # lnurldevice PAYMENT
-        res = sq.execute("SELECT * FROM lnurldevicepayment;")
-        q = f"""
-        INSERT INTO lnurldevice.lnurldevicepayment (id, deviceid, payhash, payload, pin, sats, timestamp)
-        VALUES (%s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "lnurlp":
-        # PAY LINKS
-        res = sq.execute("SELECT * FROM pay_links;")
-        q = f"""
-        INSERT INTO lnurlp.pay_links (
-            id,
-            wallet,
-            description,
-            min,
-            served_meta,
-            served_pr,
-            webhook_url,
-            success_text,
-            success_url,
-            currency,
-            comment_chars,
-            max,
-            fiat_base_multiplier
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
-        """
-        pay_links = res.fetchall()
-        insert_to_pg(q, pay_links)
-        fix_id("lnurlp.pay_links_id_seq", pay_links)
-    elif schema == "lndhub":
-        # LndHub doesn't have a database at the moment
-        pass
-    elif schema == "lnticket":
-        # TICKET
-        res = sq.execute("SELECT * FROM ticket;")
-        q = f"""
-        INSERT INTO lnticket.ticket (
-            id,
-            form,
-            email,
-            ltext,
-            name,
-            wallet,
-            sats,
-            paid,
-            time
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s::boolean, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-        # FORM
-        res = sq.execute("SELECT * FROM form2;")
-        q = f"""
-        INSERT INTO lnticket.form2 (
-            id,
-            wallet,
-            name,
-            webhook,
-            description,
-            flatrate,
-            amount,
-            amountmade,
-            time
-        )
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "livestream":
-        # LIVESTREAMS
-        res = sq.execute("SELECT * FROM livestreams;")
-        q = f"""
-        INSERT INTO livestream.livestreams (
-            id,
-            wallet,
-            fee_pct,
-            current_track
-        )
-        VALUES (%s, %s, %s, %s);
-        """
-        livestreams = res.fetchall()
-        insert_to_pg(q, livestreams)
-        fix_id("livestream.livestreams_id_seq", livestreams)
-        # PRODUCERS
-        res = sq.execute("SELECT * FROM producers;")
-        q = f"""
-        INSERT INTO livestream.producers (
-            livestream,
-            id,
-            "user",
-            wallet,
-            name
-        )
-        VALUES (%s, %s, %s, %s, %s);
-        """
-        producers = res.fetchall()
-        insert_to_pg(q, producers)
-        fix_id("livestream.producers_id_seq", producers)
-        # TRACKS
-        res = sq.execute("SELECT * FROM tracks;")
-        q = f"""
-        INSERT INTO livestream.tracks (
-            livestream,
-            id,
-            download_url,
-            price_msat,
-            name,
-            producer
-        )
-        VALUES (%s, %s, %s, %s, %s, %s);
-        """
-        tracks = res.fetchall()
-        insert_to_pg(q, tracks)
-        fix_id("livestream.tracks_id_seq", tracks)
-    elif schema == "lnaddress":
-        # DOMAINS
-        res = sq.execute("SELECT * FROM domain;")
-        q = f"""
-        INSERT INTO lnaddress.domain(
-        id, wallet, domain, webhook, cf_token, cf_zone_id, cost, "time")
-        VALUES (%s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-        # ADDRESSES
-        res = sq.execute("SELECT * FROM address;")
-        q = f"""
-        INSERT INTO lnaddress.address(
-        id, wallet, domain, email, username, wallet_key, wallet_endpoint, sats, duration, paid, "time")
-        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s::boolean, to_timestamp(%s));
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "discordbot":
-        # USERS
-        res = sq.execute("SELECT * FROM users;")
-        q = f"""
-        INSERT INTO discordbot.users(
-        id, name, admin, discord_id)
-        VALUES (%s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-        # WALLETS
-        res = sq.execute("SELECT * FROM wallets;")
-        q = f"""
-        INSERT INTO discordbot.wallets(
-        id, admin, name, "user", adminkey, inkey)
-        VALUES (%s, %s, %s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    elif schema == "scrub":
-        # SCRUB LINKS
-        res = sq.execute("SELECT * FROM scrub_links;")
-        q = f"""
-        INSERT INTO scrub.scrub_links (
-            id,
-            wallet,
-            description,
-            payoraddress
-        )
-        VALUES (%s, %s, %s, %s);
-        """
-        insert_to_pg(q, res.fetchall())
-    else:
-        print(f"❌ Not implemented: {schema}")
-        sq.close()
-
-        if ignore_missing == False:
-            raise Exception(
-                f"Not implemented: {schema}. Use --ignore-missing to skip missing extensions."
-            )
-        return
-
-    print(f"✅ Migrated: {schema}")
+        data = sq.execute(f"SELECT * FROM {tableName};").fetchall()
+        insert_to_pg(q, data)
     sq.close()
 
 
+def build_insert_query(schema, tableName, columns):
+    to_columns = ", ".join(map(lambda column: f'"{column[1]}"', columns))
+    values = ", ".join(map(lambda column: to_column_type(column[2]), columns))
+    return f"""
+        INSERT INTO {schema}.{tableName}({to_columns})
+        VALUES ({values});
+    """
+
+
+def to_column_type(columnType):
+    if columnType == "TIMESTAMP":
+        return "to_timestamp(%s)"
+    if columnType == "BOOLEAN":
+        return "%s::boolean"
+    return "%s"
+
+
 parser = argparse.ArgumentParser(
     description="LNbits migration tool for migrating data from SQLite to PostgreSQL"
 )
@@ -774,11 +207,11 @@ args = parser.parse_args()
 print("Selected path: ", args.sqlite_path)
 
 if os.path.isdir(args.sqlite_path):
+    exclude_tables = ["dbversions"]
     file = os.path.join(args.sqlite_path, "database.sqlite3")
     check_db_versions(file)
     if not args.extensions_only:
-        print(f"Migrating: {file}")
-        migrate_core(file)
+        migrate_core(file, exclude_tables)
 
 if os.path.isdir(args.sqlite_path):
     files = [
@@ -787,13 +220,8 @@ if os.path.isdir(args.sqlite_path):
 else:
     files = [args.sqlite_path]
 
+excluded_exts = ["ext_lnurlpos.sqlite3"]
 for file in files:
     filename = os.path.basename(file)
-    if filename.startswith("ext_"):
-        schema = filename.replace("ext_", "").split(".")[0]
-        print(f"Migrating: {file}")
-        migrate_ext(
-            file,
-            schema,
-            ignore_missing=args.skip_missing,
-        )
+    if filename.startswith("ext_") and filename not in excluded_exts:
+        migrate_ext(file)
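
The `build_insert_query` / `to_column_type` pair introduced above replaces every hand-written per-extension block. For reference, here is a minimal, self-contained sketch of what the builder produces; the `ticket` table and its columns are invented for illustration, and the tuples follow SQLite's `PRAGMA table_info` shape `(cid, name, type, notnull, dflt_value, pk)`:

```python
# Standalone sketch of the generic query builder added in this PR.
def to_column_type(columnType):
    # Map SQLite column types to PostgreSQL-compatible placeholders.
    if columnType == "TIMESTAMP":
        return "to_timestamp(%s)"
    if columnType == "BOOLEAN":
        return "%s::boolean"
    return "%s"


def build_insert_query(schema, tableName, columns):
    # column[1] is the name, column[2] is the declared type.
    to_columns = ", ".join(map(lambda column: f'"{column[1]}"', columns))
    values = ", ".join(map(lambda column: to_column_type(column[2]), columns))
    return f"""
        INSERT INTO {schema}.{tableName}({to_columns})
        VALUES ({values});
    """


# Hypothetical PRAGMA table_info rows for an imagined "ticket" table:
# (cid, name, type, notnull, dflt_value, pk)
columns = [
    (0, "id", "TEXT", 0, None, 1),
    (1, "wallet", "TEXT", 0, None, 0),
    (2, "paid", "BOOLEAN", 0, None, 0),
    (3, "time", "TIMESTAMP", 0, None, 0),
]

print(build_insert_query("events", "ticket", columns))
# -> INSERT INTO events.ticket("id", "wallet", "paid", "time")
#    VALUES (%s, %s, %s::boolean, to_timestamp(%s));
```

This is exactly the `%s::boolean` / `to_timestamp(%s)` convention the old hand-written blocks used, now derived from each table's declared column types.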
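The table-discovery step can also be sanity-checked against a local extension database with the standard library alone. A rough sketch mirroring `migrate_db`'s queries; the database path is an example, not a file shipped with LNbits:

```python
import sqlite3

# Example path; point this at any real ext_*.sqlite3 file.
conn = sqlite3.connect("data/ext_example.sqlite3")
cur = conn.cursor()

# Same table listing used by migrate_db: all user tables,
# skipping SQLite's internal sqlite_* tables.
tables = cur.execute(
    "SELECT name FROM sqlite_master "
    "WHERE type='table' AND name NOT LIKE 'sqlite?_%' ESCAPE '?'"
).fetchall()

for (name,) in tables:
    cols = cur.execute(f"PRAGMA table_info({name})").fetchall()
    print(name, [(c[1], c[2]) for c in cols])  # (column name, declared type)

conn.close()
```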