CI: DB migration with real data (#776)

* fix migration conv.py

* migrate with mock data
Author: calle
Date: 2022-07-25 09:13:41 +02:00
Committed by: GitHub
Parent: 4b5e113c03
Commit: 4bb184e08b
4 changed files with 74 additions and 35 deletions

CI workflow (GitHub Actions):

@@ -38,12 +38,14 @@ jobs:
         ./venv/bin/python -m pip install --upgrade pip
         ./venv/bin/pip install -r requirements.txt
         ./venv/bin/pip install pytest pytest-asyncio pytest-cov requests mock
+        sudo apt install unzip
     - name: Run migrations
       run: |
         rm -rf ./data
         mkdir -p ./data
         export LNBITS_DATA_FOLDER="./data"
+        unzip tests/data/mock_data.zip -d ./data
         timeout 5s ./venv/bin/uvicorn lnbits.__main__:app --host 0.0.0.0 --port 5001 || code=$?; if [[ $code -ne 124 && $code -ne 0 ]]; then exit $code; fi
         export LNBITS_DATABASE_URL="postgres://postgres:postgres@0.0.0.0:5432/postgres"
         timeout 5s ./venv/bin/uvicorn lnbits.__main__:app --host 0.0.0.0 --port 5001 || code=$?; if [[ $code -ne 124 && $code -ne 0 ]]; then exit $code; fi
-        ./venv/bin/python tools/conv.py --dont-ignore-missing
+        ./venv/bin/python tools/conv.py
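For reference, the two uvicorn invocations only need to stay alive long enough for LNbits' startup migrations to create the SQLite and Postgres schemas; the "|| code=$?" guard treats exit code 124 (the timeout expiring) as success. A rough local equivalent of this step, sketched in Python; the venv paths, port and credentials simply mirror the workflow above, everything else is an assumption:

import os
import subprocess

def boot_briefly(extra_env, seconds=5):
    """Start LNbits just long enough for its startup migrations to run."""
    try:
        subprocess.run(
            ["./venv/bin/uvicorn", "lnbits.__main__:app",
             "--host", "0.0.0.0", "--port", "5001"],
            env={**os.environ, **extra_env},
            timeout=seconds,
            check=True,
        )
    except subprocess.TimeoutExpired:
        # Same as `timeout` exiting 124: the server stayed up, which is the
        # success case here.
        pass

boot_briefly({"LNBITS_DATA_FOLDER": "./data"})  # populate/upgrade the SQLite db
boot_briefly({"LNBITS_DATABASE_URL": "postgres://postgres:postgres@0.0.0.0:5432/postgres"})  # prepare Postgres
subprocess.run(["./venv/bin/python", "tools/conv.py"], check=True)  # then copy the data across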

.gitignore:

@@ -15,7 +15,7 @@ __pycache__
 .webassets-cache
 htmlcov
 test-reports
-tests/data
+tests/data/*.sqlite3
 *.swo
 *.swp

tests/data/mock_data.zip: new binary file (content not shown)

tools/conv.py:

@@ -38,8 +38,6 @@ else:
     pgport = LNBITS_DATABASE_URL.split("@")[1].split(":")[1].split("/")[0]
     pgschema = ""

-print(pgdb, pguser, pgpswd, pghost, pgport, pgschema)
-

 def get_sqlite_cursor(sqdb) -> sqlite3:
     consq = sqlite3.connect(sqdb)
@@ -99,7 +97,11 @@ def insert_to_pg(query, data):
     for d in data:
         try:
             cursor.execute(query, d)
-        except:
-            raise ValueError(f"Failed to insert {d}")
+        except Exception as e:
+            if args.ignore_errors:
+                print(e)
+                print(f"Failed to insert {d}")
+            else:
+                raise ValueError(f"Failed to insert {d}")

     connection.commit()
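The effect of the new --ignore-errors branch is easiest to see in isolation: failing rows are reported and skipped instead of aborting the whole migration. A minimal, self-contained sketch of the same pattern, using an in-memory SQLite table as a stand-in for the Postgres target; the insert_rows helper and the sample rows are illustrative, not part of conv.py:

import sqlite3

def insert_rows(cursor, query, rows, ignore_errors=False):
    for row in rows:
        try:
            cursor.execute(query, row)
        except Exception as e:
            if ignore_errors:
                # Log the failing row and keep going.
                print(e)
                print(f"Failed to insert {row}")
            else:
                raise ValueError(f"Failed to insert {row}")

con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT)")
rows = [(1, "ok"), (1, "duplicate id"), (2, "also ok")]
insert_rows(cur, "INSERT INTO t VALUES (?, ?)", rows, ignore_errors=True)
con.commit()
print(cur.execute("SELECT * FROM t").fetchall())  # [(1, 'ok'), (2, 'also ok')]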
@@ -256,9 +258,10 @@ def migrate_ext(sqlite_db_file, schema, ignore_missing=True):
                 k1,
                 open_time,
                 used,
-                usescsv
+                usescsv,
+                webhook_url
             )
-            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
+            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
         """
         insert_to_pg(q, res.fetchall())
         # WITHDRAW HASH CHECK
@@ -316,8 +319,8 @@ def migrate_ext(sqlite_db_file, schema, ignore_missing=True):
         # TPOSS
         res = sq.execute("SELECT * FROM tposs;")
         q = f"""
-            INSERT INTO tpos.tposs (id, wallet, name, currency)
-            VALUES (%s, %s, %s, %s);
+            INSERT INTO tpos.tposs (id, wallet, name, currency, tip_wallet, tip_options)
+            VALUES (%s, %s, %s, %s, %s, %s);
         """
         insert_to_pg(q, res.fetchall())
     elif schema == "tipjar":
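Several of these hunks follow the same pattern: the Postgres table gained columns (here tip_wallet and tip_options), so both the column list and the %s placeholders have to grow to match the width of the rows coming out of SELECT * on the SQLite side, because insert_to_pg() passes each row through unchanged. A hypothetical helper, not part of conv.py, makes that relationship explicit:

# Illustrative only: conv.py writes these statements out by hand.
def build_insert(table, columns):
    placeholders = ", ".join(["%s"] * len(columns))
    return f"INSERT INTO {table} ({', '.join(columns)}) VALUES ({placeholders});"

print(build_insert("tpos.tposs", ["id", "wallet", "name", "currency", "tip_wallet", "tip_options"]))
# INSERT INTO tpos.tposs (id, wallet, name, currency, tip_wallet, tip_options) VALUES (%s, %s, %s, %s, %s, %s);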
@@ -512,12 +515,13 @@ def migrate_ext(sqlite_db_file, schema, ignore_missing=True):
                 wallet,
                 url,
                 memo,
+                description,
                 amount,
                 time,
                 remembers,
-                extra
+                extras
             )
-            VALUES (%s, %s, %s, %s, %s, to_timestamp(%s), %s, %s);
+            VALUES (%s, %s, %s, %s, %s, %s, to_timestamp(%s), %s, %s);
         """
         insert_to_pg(q, res.fetchall())
     elif schema == "offlineshop":
@@ -543,15 +547,15 @@ def migrate_ext(sqlite_db_file, schema, ignore_missing=True):
         # lnurldevice
         res = sq.execute("SELECT * FROM lnurldevices;")
         q = f"""
-            INSERT INTO lnurldevice.lnurldevices (id, key, title, wallet, currency, device, profit)
-            VALUES (%s, %s, %s, %s, %s, %s, %s);
+            INSERT INTO lnurldevice.lnurldevices (id, key, title, wallet, currency, device, profit, timestamp)
+            VALUES (%s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
         """
         insert_to_pg(q, res.fetchall())
         # lnurldevice PAYMENT
         res = sq.execute("SELECT * FROM lnurldevicepayment;")
         q = f"""
-            INSERT INTO lnurldevice.lnurldevicepayment (id, deviceid, payhash, payload, pin, sats)
-            VALUES (%s, %s, %s, %s, %s, %s);
+            INSERT INTO lnurldevice.lnurldevicepayment (id, deviceid, payhash, payload, pin, sats, timestamp)
+            VALUES (%s, %s, %s, %s, %s, %s, to_timestamp(%s));
         """
         insert_to_pg(q, res.fetchall())
     elif schema == "lnurlp":
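The to_timestamp(%s) wrappers exist because the SQLite databases store these fields as plain Unix epoch numbers while the corresponding Postgres columns are real timestamps; the raw number is still passed as the query parameter and Postgres performs the conversion. Shown locally with an illustrative epoch value:

from datetime import datetime, timezone

# What to_timestamp(%s) produces on the Postgres side, approximated locally.
epoch = 1658707200  # illustrative value, not taken from the mock data
print(datetime.fromtimestamp(epoch, tz=timezone.utc))  # 2022-07-25 00:00:00+00:00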
@@ -710,36 +714,69 @@ def migrate_ext(sqlite_db_file, schema, ignore_missing=True):
     sq.close()


-parser = argparse.ArgumentParser(description="Migrate data from SQLite to PostgreSQL")
+parser = argparse.ArgumentParser(
+    description="LNbits migration tool for migrating data from SQLite to PostgreSQL"
+)
 parser.add_argument(
-    dest="sqlite_file",
+    dest="sqlite_path",
     const=True,
     nargs="?",
-    help="SQLite DB to migrate from",
-    default="data/database.sqlite3",
+    help=f"SQLite DB folder *or* single extension db file to migrate. Default: {sqfolder}",
+    default=sqfolder,
     type=str,
 )
 parser.add_argument(
-    "-i",
-    "--dont-ignore-missing",
-    help="Error if migration is missing for an extension.",
+    "-e",
+    "--extensions-only",
+    help="Migrate only extensions",
     required=False,
     default=False,
-    const=True,
-    nargs="?",
-    type=bool,
+    action="store_true",
+)
+parser.add_argument(
+    "-s",
+    "--skip-missing",
+    help="Error if migration is missing for an extension",
+    required=False,
+    default=False,
+    action="store_true",
+)
+parser.add_argument(
+    "-i",
+    "--ignore-errors",
+    help="Don't error if migration fails",
+    required=False,
+    default=False,
+    action="store_true",
 )
 args = parser.parse_args()

-print(args)
+print("Selected path: ", args.sqlite_path)

-check_db_versions(args.sqlite_file)
-migrate_core(args.sqlite_file)
+if os.path.isdir(args.sqlite_path):
+    file = os.path.join(args.sqlite_path, "database.sqlite3")
+    check_db_versions(file)
+    if not args.extensions_only:
+        print(f"Migrating: {file}")
+        migrate_core(file)

-files = os.listdir(sqfolder)
+if os.path.isdir(args.sqlite_path):
+    files = [
+        os.path.join(args.sqlite_path, file) for file in os.listdir(args.sqlite_path)
+    ]
+else:
+    files = [args.sqlite_path]

 for file in files:
-    path = f"data/{file}"
-    if file.startswith("ext_"):
-        schema = file.replace("ext_", "").split(".")[0]
-        print(f"Migrating: {schema}")
-        migrate_ext(path, schema, ignore_missing=not args.dont_ignore_missing)
+    filename = os.path.basename(file)
+    if filename.startswith("ext_"):
+        schema = filename.replace("ext_", "").split(".")[0]
+        print(f"Migrating: {file}")
+        migrate_ext(
+            file,
+            schema,
+            ignore_missing=args.skip_missing,
+        )
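Finally, a sketch of how the reworked entry point is meant to be driven. The flag names come from the argparse block above; the concrete paths and the ext_tpos.sqlite3 file name are only examples:

import os
import subprocess

# Default run: migrate the core database.sqlite3 plus every ext_*.sqlite3 in sqfolder.
subprocess.run(["./venv/bin/python", "tools/conv.py"], check=True)

# Point it at a folder, but migrate only extensions, skipping extensions without
# a Postgres migration and continuing past rows that fail to insert.
subprocess.run(
    ["./venv/bin/python", "tools/conv.py", "./data",
     "--extensions-only", "--skip-missing", "--ignore-errors"],
    check=True,
)

# A single extension db can be passed instead of a folder; the target schema is
# derived from its file name exactly as in the loop above.
path = "./data/ext_tpos.sqlite3"
schema = os.path.basename(path).replace("ext_", "").split(".")[0]
print(schema)  # -> tpos
subprocess.run(["./venv/bin/python", "tools/conv.py", path], check=True)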