Serverside Pagination for payments (#1613)
* initial backend support
* implement payments pagination on frontend
* implement search for payments api
* fix pyright issues
* sqlite support for searching
* backwards compatibility
* formatting, small fixes
* small optimization
* fix sorting issue, add error handling
* GET payments test
* filter by dates, use List instead of list
* fix sqlite
* update bundle
* test old payments endpoint as well
* refactor for easier review
* optimise test
* revert unnecessary change
---------
Co-authored-by: dni ⚡ <office@dnilabs.com>
This commit is contained in: parent 45b199a8ef · commit c0f66989cb
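For orientation, a minimal client-side sketch of the feature this PR adds (illustrative only: the base URL, port, and the X-Api-Key placeholder value are assumptions, not part of the diff):

import httpx

params = {
    "limit": 10,                 # page size
    "offset": 0,                 # rows to skip
    "sortby": "time",
    "direction": "desc",
    "search": "coffee",          # text search over PaymentFilters.__search_fields__
    "amount[gt]": 10000,         # field filter using the key[operator] syntax
}
resp = httpx.get(
    "http://localhost:5000/api/v1/payments/paginated",   # assumed local instance
    params=params,
    headers={"X-Api-Key": "<wallet invoice key>"},        # assumed placeholder key
)
page = resp.json()
print(page["total"], len(page["data"]))  # Page[Payment]: total row count plus one page of rows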
lnbits/core/crud.py

@@ -7,12 +7,12 @@ from uuid import uuid4
 import shortuuid

 from lnbits import bolt11
-from lnbits.db import COCKROACH, POSTGRES, Connection, Filters
+from lnbits.db import Connection, Filters, Page
 from lnbits.extension_manager import InstallableExtension
 from lnbits.settings import AdminSettings, EditableSettings, SuperSettings, settings

 from . import db
-from .models import BalanceCheck, Payment, TinyURL, User, Wallet
+from .models import BalanceCheck, Payment, PaymentFilters, TinyURL, User, Wallet

 # accounts
 # --------

@@ -343,7 +343,7 @@ async def get_latest_payments_by_extension(ext_name: str, ext_id: str, limit: in
     return rows


-async def get_payments(
+async def get_payments_paginated(
     *,
     wallet_id: Optional[str] = None,
     complete: bool = False,

@@ -352,28 +352,23 @@ async def get_payments(
     incoming: bool = False,
     since: Optional[int] = None,
     exclude_uncheckable: bool = False,
-    filters: Optional[Filters[Payment]] = None,
+    filters: Optional[Filters[PaymentFilters]] = None,
     conn: Optional[Connection] = None,
-) -> List[Payment]:
+) -> Page[Payment]:
     """
     Filters payments to be returned by complete | pending | outgoing | incoming.
     """

-    args: List[Any] = []
+    values: List[Any] = []
     clause: List[str] = []

     if since is not None:
-        if db.type == POSTGRES:
-            clause.append("time > to_timestamp(?)")
-        elif db.type == COCKROACH:
-            clause.append("time > cast(? AS timestamp)")
-        else:
-            clause.append("time > ?")
-        args.append(since)
+        clause.append(f"time > {db.timestamp_placeholder}")
+        values.append(since)

     if wallet_id:
         clause.append("wallet = ?")
-        args.append(wallet_id)
+        values.append(wallet_id)

     if complete and pending:
         pass

@@ -397,21 +392,54 @@ async def get_payments(
     clause.append("checking_id NOT LIKE 'temp_%'")
     clause.append("checking_id NOT LIKE 'internal_%'")

-    if not filters:
-        filters = Filters(limit=None, offset=None)
-
-    rows = await (conn or db).fetchall(
-        f"""
-        SELECT *
-        FROM apipayments
-        {filters.where(clause)}
-        ORDER BY time DESC
-        {filters.pagination()}
-        """,
-        filters.values(args),
+    return await (conn or db).fetch_page(
+        "SELECT * FROM apipayments",
+        clause,
+        values,
+        filters=filters,
+        model=Payment,
     )

-    return [Payment.from_row(row) for row in rows]
+
+async def get_payments(
+    *,
+    wallet_id: Optional[str] = None,
+    complete: bool = False,
+    pending: bool = False,
+    outgoing: bool = False,
+    incoming: bool = False,
+    since: Optional[int] = None,
+    exclude_uncheckable: bool = False,
+    filters: Optional[Filters[PaymentFilters]] = None,
+    conn: Optional[Connection] = None,
+    limit: Optional[int] = None,
+    offset: Optional[int] = None,
+) -> list[Payment]:
+    """
+    Filters payments to be returned by complete | pending | outgoing | incoming.
+    """
+
+    if not filters:
+        filters = Filters()
+
+    if limit:
+        filters.limit = limit
+    if offset:
+        filters.offset = offset
+
+    page = await get_payments_paginated(
+        wallet_id=wallet_id,
+        complete=complete,
+        pending=pending,
+        outgoing=outgoing,
+        incoming=incoming,
+        since=since,
+        exclude_uncheckable=exclude_uncheckable,
+        filters=filters,
+        conn=conn,
+    )
+
+    return page.data


 async def delete_expired_invoices(

@@ -454,7 +482,6 @@ async def create_payment(  (whitespace-only: one blank line removed)
     webhook: Optional[str] = None,
     conn: Optional[Connection] = None,
 ) -> Payment:
     # todo: add this when tests are fixed
     # previous_payment = await get_wallet_payment(wallet_id, payment_hash, conn=conn)
     # assert previous_payment is None, "Payment already exists"

@@ -514,7 +541,6 @@ async def update_payment_details(  (whitespace-only: one blank line removed)
     new_checking_id: Optional[str] = None,
     conn: Optional[Connection] = None,
 ) -> None:
     set_clause: List[str] = []
     set_variables: List[Any] = []
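A minimal usage sketch of the two CRUD entry points above (not part of the diff; assumes an async context and an existing wallet id):

from lnbits.core.crud import get_payments, get_payments_paginated
from lnbits.db import Filters

async def latest_payments(wallet_id: str):
    # New paginated variant: returns a Page[Payment] with .data and .total
    page = await get_payments_paginated(
        wallet_id=wallet_id,
        complete=True,
        pending=True,
        filters=Filters(limit=10, offset=0, sortby="time", direction="desc"),
    )
    # Backwards-compatible variant: still returns a plain list of Payment
    listed = await get_payments(wallet_id=wallet_id, complete=True, pending=True, limit=10)
    return page.data, page.total, listed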
lnbits/core/models.py

@@ -11,7 +11,7 @@ from lnurl import encode as lnurl_encode
 from loguru import logger
 from pydantic import BaseModel

-from lnbits.db import Connection
+from lnbits.db import Connection, FilterModel, FromRowModel
 from lnbits.helpers import url_for
 from lnbits.settings import get_wallet_class, settings
 from lnbits.wallets.base import PaymentStatus

@@ -86,7 +86,7 @@ class User(BaseModel):
         return False


-class Payment(BaseModel):
+class Payment(FromRowModel):
     checking_id: str
     pending: bool
     amount: int

@@ -214,6 +214,24 @@ class Payment(BaseModel):
         await delete_payment(self.checking_id, conn=conn)


+class PaymentFilters(FilterModel):
+    __search_fields__ = ["memo", "amount"]
+
+    checking_id: str
+    amount: int
+    fee: int
+    memo: Optional[str]
+    time: datetime.datetime
+    bolt11: str
+    preimage: str
+    payment_hash: str
+    expiry: Optional[datetime.datetime]
+    extra: Dict = {}
+    wallet_id: str
+    webhook: Optional[str]
+    webhook_status: Optional[int]
+
+
 class BalanceCheck(BaseModel):
     wallet: str
     service: str
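For illustration only (a hypothetical model, not part of the diff): any other record type could opt into the same filtering and search machinery by subclassing FilterModel and listing its searchable columns, just as PaymentFilters does above.

from typing import Optional

from lnbits.db import FilterModel

class WalletFilters(FilterModel):      # hypothetical example model
    __search_fields__ = ["name"]       # columns matched by the ?search=... parameter

    id: str
    name: str
    user: Optional[str]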
service-worker.js

@@ -1,6 +1,6 @@
 // update cache version every time there is a new deployment
 // so the service worker reinitializes the cache
-const CACHE_VERSION = 5
+const CACHE_VERSION = 6
 const CURRENT_CACHE = `lnbits-${CACHE_VERSION}-`

 const getApiKey = request => {
core wallet page script (wallet.js)

@@ -152,14 +152,14 @@ new Vue({
           field: 'memo'
         },
         {
-          name: 'date',
+          name: 'time',
           align: 'left',
           label: this.$t('date'),
           field: 'date',
           sortable: true
         },
         {
-          name: 'sat',
+          name: 'amount',
           align: 'right',
           label: this.$t('amount') + ' (' + LNBITS_DENOMINATION + ')',
           field: 'sat',

@@ -173,9 +173,14 @@ new Vue({
         }
       ],
       pagination: {
-        rowsPerPage: 10
+        rowsPerPage: 10,
+        page: 1,
+        sortBy: 'time',
+        descending: true,
+        rowsNumber: 10
       },
-      filter: null
+      filter: null,
+      loading: false
     },
     paymentsChart: {
       show: false

@@ -695,16 +700,35 @@ new Vue({
       LNbits.href.deleteWallet(walletId, user)
     })
   },
-    fetchPayments: function () {
-      return LNbits.api.getPayments(this.g.wallet).then(response => {
-        this.payments = response.data
-          .map(obj => {
-            return LNbits.map.payment(obj)
-          })
-          .sort((a, b) => {
-            return b.time - a.time
-          })
-      })
+    fetchPayments: function (props) {
+      // Props are passed by Quasar when pagination or sorting changes
+      if (props) {
+        this.paymentsTable.pagination = props.pagination
+      }
+      let pagination = this.paymentsTable.pagination
+      this.paymentsTable.loading = true
+      const query = {
+        limit: pagination.rowsPerPage,
+        offset: (pagination.page - 1) * pagination.rowsPerPage,
+        sortby: pagination.sortBy ?? 'time',
+        direction: pagination.descending ? 'desc' : 'asc'
+      }
+      if (this.paymentsTable.filter) {
+        query.search = this.paymentsTable.filter
+      }
+      return LNbits.api
+        .getPayments(this.g.wallet, query)
+        .then(response => {
+          this.paymentsTable.loading = false
+          this.paymentsTable.pagination.rowsNumber = response.data.total
+          this.payments = response.data.data.map(obj => {
+            return LNbits.map.payment(obj)
+          })
+        })
+        .catch(err => {
+          this.paymentsTable.loading = false
+          LNbits.utils.notifyApiError(err)
+        })
     },
     fetchBalance: function () {
       LNbits.api.getWallet(this.g.wallet).then(response => {
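The new fetchPayments handler converts Quasar's 1-based page number into a SQL offset with (page - 1) * rowsPerPage. The same arithmetic, sketched in Python for clarity:

def page_to_offset(page: int, rows_per_page: int) -> int:
    # Quasar pages are 1-based; SQL OFFSET counts skipped rows
    return (page - 1) * rows_per_page

assert page_to_offset(1, 10) == 0   # first page starts at the first row
assert page_to_offset(3, 10) == 20  # third page skips the first 20 rows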
core wallet page template (wallet.html)

@@ -125,7 +125,6 @@
       </div>
     </div>
     <q-input
-      v-if="payments.length > 10"
      filled
      dense
      clearable

@@ -138,12 +137,14 @@
     <q-table
       dense
       flat
-      :data="filteredPayments"
+      :data="payments"
       :row-key="paymentTableRowKey"
       :columns="paymentsTable.columns"
       :pagination.sync="paymentsTable.pagination"
       :no-data-label="$t('no_transactions')"
       :filter="paymentsTable.filter"
+      :loading="paymentsTable.loading"
+      @request="fetchPayments"
     >
       {% raw %}
       <template v-slot:header="props">

@@ -192,14 +193,14 @@
           </q-badge>
           {{ props.row.memo }}
         </q-td>
-        <q-td auto-width key="date" :props="props">
+        <q-td auto-width key="time" :props="props">
           <q-tooltip>{{ props.row.date }}</q-tooltip>
           {{ props.row.dateFrom }}
         </q-td>
         {% endraw %}
         <q-td
           auto-width
-          key="sat"
+          key="amount"
           v-if="'{{LNBITS_DENOMINATION}}' != 'sats'"
           :props="props"
           >{% raw %} {{

@@ -207,7 +208,7 @@
         }}
         </q-td>

-        <q-td auto-width key="sat" v-else :props="props">
+        <q-td auto-width key="amount" v-else :props="props">
           {{ props.row.fsat }}
         </q-td>
         <q-td auto-width key="fee" :props="props">
@ -33,8 +33,8 @@ from lnbits.core.helpers import (
|
|||||||
migrate_extension_database,
|
migrate_extension_database,
|
||||||
stop_extension_background_work,
|
stop_extension_background_work,
|
||||||
)
|
)
|
||||||
from lnbits.core.models import Payment, User, Wallet
|
from lnbits.core.models import Payment, PaymentFilters, User, Wallet
|
||||||
from lnbits.db import Filters
|
from lnbits.db import Filters, Page
|
||||||
from lnbits.decorators import (
|
from lnbits.decorators import (
|
||||||
WalletTypeInfo,
|
WalletTypeInfo,
|
||||||
check_admin,
|
check_admin,
|
||||||
@ -66,6 +66,7 @@ from ..crud import (
|
|||||||
delete_tinyurl,
|
delete_tinyurl,
|
||||||
get_dbversions,
|
get_dbversions,
|
||||||
get_payments,
|
get_payments,
|
||||||
|
get_payments_paginated,
|
||||||
get_standalone_payment,
|
get_standalone_payment,
|
||||||
get_tinyurl,
|
get_tinyurl,
|
||||||
get_tinyurl_by_url,
|
get_tinyurl_by_url,
|
||||||
@ -122,19 +123,19 @@ async def api_update_wallet(
|
|||||||
summary="get list of payments",
|
summary="get list of payments",
|
||||||
response_description="list of payments",
|
response_description="list of payments",
|
||||||
response_model=List[Payment],
|
response_model=List[Payment],
|
||||||
openapi_extra=generate_filter_params_openapi(Payment),
|
openapi_extra=generate_filter_params_openapi(PaymentFilters),
|
||||||
)
|
)
|
||||||
async def api_payments(
|
async def api_payments(
|
||||||
wallet: WalletTypeInfo = Depends(get_key_type),
|
wallet: WalletTypeInfo = Depends(get_key_type),
|
||||||
filters: Filters = Depends(parse_filters(Payment)),
|
filters: Filters = Depends(parse_filters(PaymentFilters)),
|
||||||
):
|
):
|
||||||
pendingPayments = await get_payments(
|
pending_payments = await get_payments(
|
||||||
wallet_id=wallet.wallet.id,
|
wallet_id=wallet.wallet.id,
|
||||||
pending=True,
|
pending=True,
|
||||||
exclude_uncheckable=True,
|
exclude_uncheckable=True,
|
||||||
filters=filters,
|
filters=filters,
|
||||||
)
|
)
|
||||||
for payment in pendingPayments:
|
for payment in pending_payments:
|
||||||
await check_transaction_status(
|
await check_transaction_status(
|
||||||
wallet_id=payment.wallet_id, payment_hash=payment.payment_hash
|
wallet_id=payment.wallet_id, payment_hash=payment.payment_hash
|
||||||
)
|
)
|
||||||
@ -146,6 +147,37 @@ async def api_payments(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@core_app.get(
|
||||||
|
"/api/v1/payments/paginated",
|
||||||
|
name="Payment List",
|
||||||
|
summary="get paginated list of payments",
|
||||||
|
response_description="list of payments",
|
||||||
|
response_model=Page[Payment],
|
||||||
|
openapi_extra=generate_filter_params_openapi(PaymentFilters),
|
||||||
|
)
|
||||||
|
async def api_payments_paginated(
|
||||||
|
wallet: WalletTypeInfo = Depends(get_key_type),
|
||||||
|
filters: Filters = Depends(parse_filters(PaymentFilters)),
|
||||||
|
):
|
||||||
|
pending = await get_payments_paginated(
|
||||||
|
wallet_id=wallet.wallet.id,
|
||||||
|
pending=True,
|
||||||
|
exclude_uncheckable=True,
|
||||||
|
filters=filters,
|
||||||
|
)
|
||||||
|
for payment in pending.data:
|
||||||
|
await check_transaction_status(
|
||||||
|
wallet_id=payment.wallet_id, payment_hash=payment.payment_hash
|
||||||
|
)
|
||||||
|
page = await get_payments_paginated(
|
||||||
|
wallet_id=wallet.wallet.id,
|
||||||
|
pending=True,
|
||||||
|
complete=True,
|
||||||
|
filters=filters,
|
||||||
|
)
|
||||||
|
return page
|
||||||
|
|
||||||
|
|
||||||
class CreateInvoiceData(BaseModel):
|
class CreateInvoiceData(BaseModel):
|
||||||
out: Optional[bool] = True
|
out: Optional[bool] = True
|
||||||
amount: float = Query(None, ge=0)
|
amount: float = Query(None, ge=0)
|
||||||
@ -788,7 +820,6 @@ async def api_install_extension(
|
|||||||
|
|
||||||
@core_app.delete("/api/v1/extension/{ext_id}")
|
@core_app.delete("/api/v1/extension/{ext_id}")
|
||||||
async def api_uninstall_extension(ext_id: str, user: User = Depends(check_admin)):
|
async def api_uninstall_extension(ext_id: str, user: User = Depends(check_admin)):
|
||||||
|
|
||||||
installable_extensions = await InstallableExtension.get_installable_extensions()
|
installable_extensions = await InstallableExtension.get_installable_extensions()
|
||||||
|
|
||||||
extensions = [e for e in installable_extensions if e.id == ext_id]
|
extensions = [e for e in installable_extensions if e.id == ext_id]
|
||||||
|
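A sketch of how another endpoint could reuse the same parse_filters / generate_filter_params_openapi pattern; the router, route, and model below are hypothetical:

from fastapi import APIRouter, Depends

from lnbits.db import FilterModel, Filters
from lnbits.decorators import parse_filters
from lnbits.helpers import generate_filter_params_openapi

class ThingFilters(FilterModel):       # hypothetical filter model
    __search_fields__ = ["name"]
    name: str
    amount: int

things_router = APIRouter()            # hypothetical router

@things_router.get(
    "/api/v1/things",                  # hypothetical route
    openapi_extra=generate_filter_params_openapi(ThingFilters),
)
async def api_things(filters: Filters = Depends(parse_filters(ThingFilters))):
    # filters now carries limit/offset/sortby/direction/search plus any parsed field filters
    ...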
lnbits/db.py (279 changed lines)

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 import datetime
 import os

@@ -5,7 +7,8 @@ import re
 import time
 from contextlib import asynccontextmanager
 from enum import Enum
-from typing import Any, Generic, List, Optional, Tuple, Type, TypeVar
+from sqlite3 import Row
+from typing import Any, Generic, List, Literal, Optional, Type, TypeVar

 from loguru import logger
 from pydantic import BaseModel, ValidationError

@@ -19,6 +22,51 @@ POSTGRES = "POSTGRES"
 COCKROACH = "COCKROACH"
 SQLITE = "SQLITE"

+if settings.lnbits_database_url:
+    database_uri = settings.lnbits_database_url
+
+    if database_uri.startswith("cockroachdb://"):
+        DB_TYPE = COCKROACH
+    else:
+        DB_TYPE = POSTGRES
+
+        from psycopg2.extensions import DECIMAL, new_type, register_type
+
+        def _parse_timestamp(value, _):
+            if value is None:
+                return None
+            f = "%Y-%m-%d %H:%M:%S.%f"
+            if "." not in value:
+                f = "%Y-%m-%d %H:%M:%S"
+            return time.mktime(datetime.datetime.strptime(value, f).timetuple())
+
+        register_type(
+            new_type(
+                DECIMAL.values,
+                "DEC2FLOAT",
+                lambda value, curs: float(value) if value is not None else None,
+            )
+        )
+        register_type(
+            new_type(
+                (1082, 1083, 1266),
+                "DATE2INT",
+                lambda value, curs: time.mktime(value.timetuple())
+                if value is not None
+                else None,
+            )
+        )
+
+        register_type(new_type((1184, 1114), "TIMESTAMP2INT", _parse_timestamp))
+else:
+    if os.path.isdir(settings.lnbits_data_folder):
+        DB_TYPE = SQLITE
+    else:
+        raise NotADirectoryError(
+            f"LNBITS_DATA_FOLDER named {settings.lnbits_data_folder} was not created"
+            f" - please 'mkdir {settings.lnbits_data_folder}' and try again"
+        )
+
+
 class Compat:
     type: Optional[str] = "<inherited>"

@@ -68,6 +116,16 @@ class Compat:
             return "BIGINT"
         return "INT"

+    @classmethod
+    @property
+    def timestamp_placeholder(cls):
+        if DB_TYPE == POSTGRES:
+            return "to_timestamp(?)"
+        elif DB_TYPE == COCKROACH:
+            return "cast(? AS timestamp)"
+        else:
+            return "?"
+
+
 class Connection(Compat):
     def __init__(self, conn: AsyncConnection, txn, typ, name, schema):

@@ -87,17 +145,21 @@ class Connection(Compat):
         # strip html
         CLEANR = re.compile("<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});")

-        def cleanhtml(raw_html):
-            if isinstance(raw_html, str):
-                cleantext = re.sub(CLEANR, "", raw_html)
-                return cleantext
-            else:
-                return raw_html
-
         # tuple to list and back to tuple
-        value_list = [values] if isinstance(values, str) else list(values)
-        values = tuple([cleanhtml(val) for val in value_list])
-        return values
+        raw_values = [values] if isinstance(values, str) else list(values)
+        values = []
+        for raw_value in raw_values:
+            if isinstance(raw_value, str):
+                values.append(re.sub(CLEANR, "", raw_value))
+            elif isinstance(raw_value, datetime.datetime):
+                ts = raw_value.timestamp()
+                if self.type == SQLITE:
+                    values.append(int(ts))
+                else:
+                    values.append(ts)
+            else:
+                values.append(raw_value)
+        return tuple(values)

     async def fetchall(self, query: str, values: tuple = ()) -> list:
         result = await self.conn.execute(

@@ -113,6 +175,51 @@ class Connection(Compat):
         await result.close()
         return row

+    async def fetch_page(
+        self,
+        query: str,
+        where: Optional[List[str]] = None,
+        values: Optional[List[str]] = None,
+        filters: Optional[Filters] = None,
+        model: Optional[Type[TRowModel]] = None,
+    ) -> Page[TRowModel]:
+        if not filters:
+            filters = Filters()
+        clause = filters.where(where)
+        parsed_values = filters.values(values)
+
+        rows = await self.fetchall(
+            f"""
+            {query}
+            {clause}
+            {filters.order_by()}
+            {filters.pagination()}
+            """,
+            parsed_values,
+        )
+        if rows:
+            # no need for extra query if no pagination is specified
+            if filters.offset or filters.limit:
+                count = await self.fetchone(
+                    f"""
+                    SELECT COUNT(*) FROM (
+                        {query}
+                        {clause}
+                    ) as count
+                    """,
+                    parsed_values,
+                )
+                count = int(count[0])
+            else:
+                count = len(rows)
+        else:
+            count = 0
+
+        return Page(
+            data=[model.from_row(row) for row in rows] if model else rows,
+            total=count,
+        )
+
     async def execute(self, query: str, values: tuple = ()):
         return await self.conn.execute(
             self.rewrite_query(query), self.rewrite_values(values)

@@ -122,57 +229,17 @@ class Connection(Compat):
 class Database(Compat):
     def __init__(self, db_name: str):
         self.name = db_name
+        self.schema = self.name
+        self.type = DB_TYPE

-        if settings.lnbits_database_url:
-            database_uri = settings.lnbits_database_url
-
-            if database_uri.startswith("cockroachdb://"):
-                self.type = COCKROACH
-            else:
-                self.type = POSTGRES
-
-                from psycopg2.extensions import DECIMAL, new_type, register_type
-
-                def _parse_timestamp(value, _):
-                    if value is None:
-                        return None
-                    f = "%Y-%m-%d %H:%M:%S.%f"
-                    if "." not in value:
-                        f = "%Y-%m-%d %H:%M:%S"
-                    return time.mktime(datetime.datetime.strptime(value, f).timetuple())
-
-                register_type(
-                    new_type(
-                        DECIMAL.values,
-                        "DEC2FLOAT",
-                        lambda value, curs: float(value) if value is not None else None,
-                    )
-                )
-                register_type(
-                    new_type(
-                        (1082, 1083, 1266),
-                        "DATE2INT",
-                        lambda value, curs: time.mktime(value.timetuple())
-                        if value is not None
-                        else None,
-                    )
-                )
-
-                register_type(new_type((1184, 1114), "TIMESTAMP2INT", _parse_timestamp))
-        else:
-            if os.path.isdir(settings.lnbits_data_folder):
-                self.path = os.path.join(
-                    settings.lnbits_data_folder, f"{self.name}.sqlite3"
-                )
-                database_uri = f"sqlite:///{self.path}"
-                self.type = SQLITE
-            else:
-                raise NotADirectoryError(
-                    f"LNBITS_DATA_FOLDER named {settings.lnbits_data_folder} was not created"
-                    f" - please 'mkdir {settings.lnbits_data_folder}' and try again"
-                )
-        logger.trace(f"database {self.type} added for {self.name}")
-        self.schema = self.name
+        if DB_TYPE == SQLITE:
+            self.path = os.path.join(
+                settings.lnbits_data_folder, f"{self.name}.sqlite3"
+            )
+            database_uri = f"sqlite:///{self.path}"
+        else:
+            database_uri = settings.lnbits_database_url
+
         if self.name.startswith("ext_"):
             self.schema = self.name[4:]
         else:

@@ -181,6 +248,8 @@ class Database(Compat):
         self.engine = create_engine(database_uri, strategy=ASYNCIO_STRATEGY)
         self.lock = asyncio.Lock()

+        logger.trace(f"database {self.type} added for {self.name}")
+
     @asynccontextmanager
     async def connect(self):
         await self.lock.acquire()

@@ -215,6 +284,17 @@ class Database(Compat):
         await result.close()
         return row

+    async def fetch_page(
+        self,
+        query: str,
+        where: Optional[List[str]] = None,
+        values: Optional[List[str]] = None,
+        filters: Optional[Filters] = None,
+        model: Optional[Type[TRowModel]] = None,
+    ) -> Page[TRowModel]:
+        async with self.connect() as conn:
+            return await conn.fetch_page(query, where, values, filters, model)
+
     async def execute(self, query: str, values: tuple = ()):
         async with self.connect() as conn:
             return await conn.execute(query, values)

@@ -229,6 +309,8 @@ class Operator(Enum):
     LT = "lt"
     EQ = "eq"
     NE = "ne"
+    GE = "ge"
+    LE = "le"
     INCLUDE = "in"
     EXCLUDE = "ex"

@@ -246,21 +328,45 @@ class Operator(Enum):
             return ">"
         elif self == Operator.LT:
             return "<"
+        elif self == Operator.GE:
+            return ">="
+        elif self == Operator.LE:
+            return "<="
         else:
             raise ValueError("Unknown SQL Operator")


+class FromRowModel(BaseModel):
+    @classmethod
+    def from_row(cls, row: Row):
+        return cls(**dict(row))
+
+
+class FilterModel(BaseModel):
+    __search_fields__: List[str] = []
+
+
+T = TypeVar("T")
 TModel = TypeVar("TModel", bound=BaseModel)
+TRowModel = TypeVar("TRowModel", bound=FromRowModel)
+TFilterModel = TypeVar("TFilterModel", bound=FilterModel)


-class Filter(BaseModel, Generic[TModel]):
+class Page(BaseModel, Generic[T]):
+    data: list[T]
+    total: int
+
+
+class Filter(BaseModel, Generic[TFilterModel]):
     field: str
-    nested: Optional[list[str]]
+    nested: Optional[List[str]]
     op: Operator = Operator.EQ
     values: list[Any]

+    model: Optional[Type[TFilterModel]]
+
     @classmethod
-    def parse_query(cls, key: str, raw_values: list[Any], model: Type[TModel]):
+    def parse_query(cls, key: str, raw_values: list[Any], model: Type[TFilterModel]):
         # Key format:
         # key[operator]
         # e.g. name[eq]

@@ -300,7 +406,7 @@ class Filter(BaseModel, Generic[TModel]):
         else:
             raise ValueError("Unknown filter field")

-        return cls(field=field, op=op, nested=nested, values=values)
+        return cls(field=field, op=op, nested=nested, values=values, model=model)

     @property
     def statement(self):

@@ -308,18 +414,29 @@ class Filter(BaseModel, Generic[TModel]):
         if self.nested:
             for name in self.nested:
                 accessor = f"({accessor} ->> '{name}')"
+        if self.model and self.model.__fields__[self.field].type_ == datetime.datetime:
+            placeholder = Compat.timestamp_placeholder
+        else:
+            placeholder = "?"
         if self.op in (Operator.INCLUDE, Operator.EXCLUDE):
-            placeholders = ", ".join(["?"] * len(self.values))
+            placeholders = ", ".join([placeholder] * len(self.values))
             stmt = [f"{accessor} {self.op.as_sql} ({placeholders})"]
         else:
-            stmt = [f"{accessor} {self.op.as_sql} ?"] * len(self.values)
+            stmt = [f"{accessor} {self.op.as_sql} {placeholder}"] * len(self.values)
         return " OR ".join(stmt)


-class Filters(BaseModel, Generic[TModel]):
-    filters: List[Filter[TModel]] = []
-    limit: Optional[int]
-    offset: Optional[int]
+class Filters(BaseModel, Generic[TFilterModel]):
+    filters: List[Filter[TFilterModel]] = []
+    search: Optional[str] = None
+
+    offset: Optional[int] = None
+    limit: Optional[int] = None
+
+    sortby: Optional[str] = None
+    direction: Optional[Literal["asc", "desc"]] = None
+
+    model: Optional[Type[TFilterModel]] = None

     def pagination(self) -> str:
         stmt = ""

@@ -329,16 +446,36 @@ class Filters(BaseModel, Generic[TModel]):
             stmt += f"OFFSET {self.offset}"
         return stmt

-    def where(self, where_stmts: List[str]) -> str:
+    def where(self, where_stmts: Optional[List[str]] = None) -> str:
+        if not where_stmts:
+            where_stmts = []
         if self.filters:
             for filter in self.filters:
                 where_stmts.append(filter.statement)
+        if self.search and self.model:
+            if DB_TYPE == POSTGRES:
+                where_stmts.append(
+                    f"lower(concat({f', '.join(self.model.__search_fields__)})) LIKE ?"
+                )
+            elif DB_TYPE == SQLITE:
+                where_stmts.append(
+                    f"lower({'||'.join(self.model.__search_fields__)}) LIKE ?"
+                )
         if where_stmts:
             return "WHERE " + " AND ".join(where_stmts)
         return ""

-    def values(self, values: List[str]) -> Tuple:
+    def order_by(self) -> str:
+        if self.sortby:
+            return f"ORDER BY {self.sortby} {self.direction or 'asc'}"
+        return ""
+
+    def values(self, values: Optional[List[str]] = None) -> tuple:
+        if not values:
+            values = []
         if self.filters:
             for filter in self.filters:
                 values.extend(filter.values)
+        if self.search and self.model:
+            values.append(f"%{self.search}%")
         return tuple(values)
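A sketch of the SQL fragments the extended Filters helpers emit, assuming the classes above are importable from lnbits.db, the PaymentFilters model from lnbits/core/models.py, and a SQLite backend (plain "?" placeholders); the commented outputs are approximate:

from lnbits.core.models import PaymentFilters
from lnbits.db import Filter, Filters, Operator

f = Filters(
    filters=[
        Filter(field="amount", op=Operator.GT, values=[10000], model=PaymentFilters)
    ],
    search="coffee",
    sortby="time",
    direction="desc",
    limit=10,
    offset=20,
    model=PaymentFilters,
)
print(f.where(["wallet = ?"]))  # WHERE wallet = ? AND amount > ? AND lower(memo||amount) LIKE ?
print(f.order_by())             # ORDER BY time desc
print(f.pagination())           # LIMIT 10 OFFSET 20
print(f.values(["some-wallet-id"]))  # ('some-wallet-id', 10000, '%coffee%')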
lnbits/decorators.py

@@ -1,16 +1,16 @@
 from http import HTTPStatus
-from typing import Optional, Type
+from typing import Literal, Optional, Type

-from fastapi import HTTPException, Request, Security, status
+from fastapi import Query, Request, Security, status
+from fastapi.exceptions import HTTPException
 from fastapi.openapi.models import APIKey, APIKeyIn
 from fastapi.security import APIKeyHeader, APIKeyQuery
 from fastapi.security.base import SecurityBase
-from pydantic import BaseModel
 from pydantic.types import UUID4

 from lnbits.core.crud import get_user, get_wallet_for_key
 from lnbits.core.models import User, Wallet
-from lnbits.db import Filter, Filters
+from lnbits.db import Filter, Filters, TFilterModel
 from lnbits.requestvars import g
 from lnbits.settings import settings

@@ -185,7 +185,6 @@ async def require_admin_key(  (whitespace-only: one blank line removed)
     api_key_header: str = Security(api_key_header),
     api_key_query: str = Security(api_key_query),
 ):
     token = api_key_header or api_key_query

     if not token:

@@ -211,7 +210,6 @@ async def require_invoice_key(  (whitespace-only: one blank line removed)
     api_key_header: str = Security(api_key_header),
     api_key_query: str = Security(api_key_query),
 ):
     token = api_key_header or api_key_query

     if not token:

@@ -279,14 +277,19 @@ async def check_super_user(usr: UUID4) -> User:
     return user


-def parse_filters(model: Type[BaseModel]):
+def parse_filters(model: Type[TFilterModel]):
     """
     Parses the query params as filters.
     :param model: model used for validation of filter values
     """

     def dependency(
-        request: Request, limit: Optional[int] = None, offset: Optional[int] = None
+        request: Request,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sortby: Optional[str] = None,
+        direction: Optional[Literal["asc", "desc"]] = None,
+        search: Optional[str] = Query(None, description="Text based search"),
     ):
         params = request.query_params
         filters = []

@@ -300,6 +303,10 @@ def parse_filters(model: Type[BaseModel]):
             filters=filters,
             limit=limit,
             offset=offset,
+            sortby=sortby,
+            direction=direction,
+            search=search,
+            model=model,
         )

     return dependency
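How the key[operator] query-string syntax maps onto Filter objects (a sketch; parse_filters feeds each non-reserved query parameter through Filter.parse_query as shown above):

from lnbits.core.models import PaymentFilters
from lnbits.db import Filter

# e.g. GET /api/v1/payments?amount[gt]=10000
flt = Filter.parse_query("amount[gt]", ["10000"], PaymentFilters)
# flt.op is Operator.GT and flt.values are validated against PaymentFilters.amount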
lnbits/helpers.py

@@ -4,7 +4,6 @@ from typing import Any, List, Optional, Type
 import jinja2
 import shortuuid
-from pydantic import BaseModel
 from pydantic.schema import (
     field_schema,
     get_flat_models_from_fields,

@@ -15,6 +14,7 @@ from lnbits.jinja2_templating import Jinja2Templates
 from lnbits.requestvars import g
 from lnbits.settings import settings

+from .db import FilterModel
 from .extension_manager import get_valid_extensions

@@ -32,7 +32,6 @@ def url_for(endpoint: str, external: Optional[bool] = False, **params: Any) -> s  (whitespace-only: one blank line removed)
 def template_renderer(additional_folders: Optional[List] = None) -> Jinja2Templates:
     folders = ["lnbits/templates", "lnbits/core/templates"]
     if additional_folders:
         folders.extend(additional_folders)

@@ -96,7 +95,7 @@ def get_current_extension_name() -> str:
     return ext_name


-def generate_filter_params_openapi(model: Type[BaseModel], keep_optional=False):
+def generate_filter_params_openapi(model: Type[FilterModel], keep_optional=False):
     """
     Generate openapi documentation for Filters. This is intended to be used along parse_filters (see example)
     :param model: Filter model

@@ -117,6 +116,11 @@ def generate_filter_params_openapi(model: Type[BaseModel], keep_optional=False):
         description = "Supports Filtering"
         if schema["type"] == "object":
             description += f". Nested attributes can be filtered too, e.g. `{field.alias}.[additional].[attributes]`"
+        if (
+            hasattr(model, "__search_fields__")
+            and field.name in model.__search_fields__
+        ):
+            description += ". Supports Search"

         parameter = {
             "name": field.alias,
lnbits/static/bundle.min.js (vendored, 2 changed lines): diff suppressed because one or more lines are too long.
LNbits JS API client (base.js)

@@ -67,8 +67,13 @@ window.LNbits = {
     getWallet: function (wallet) {
       return this.request('get', '/api/v1/wallet', wallet.inkey)
     },
-    getPayments: function (wallet) {
-      return this.request('get', '/api/v1/payments', wallet.inkey)
+    getPayments: function (wallet, query) {
+      const params = new URLSearchParams(query)
+      return this.request(
+        'get',
+        '/api/v1/payments/paginated?' + params,
+        wallet.inkey
+      )
     },
     getPayment: function (wallet, paymentHash) {
       return this.request(

@@ -185,7 +190,7 @@ window.LNbits = {
     },
     payment: function (data) {
       obj = {
-        checking_id: data.id,
+        checking_id: data.checking_id,
         pending: data.pending,
         amount: data.amount,
         fee: data.fee,
core payments API tests (test_api.py)

@@ -1,10 +1,15 @@
+import asyncio
 import hashlib
+from time import time

 import pytest

 from lnbits import bolt11
+from lnbits.core.models import Payment
 from lnbits.core.views.api import api_payment
+from lnbits.db import DB_TYPE, SQLITE
 from lnbits.settings import get_wallet_class
+from tests.conftest import CreateInvoiceData, api_payments_create_invoice

 from ...helpers import get_random_invoice_data, is_fake

@@ -181,6 +186,66 @@ async def test_pay_invoice_adminkey(client, invoice, adminkey_headers_from):
     assert response.status_code > 300  # should fail


+@pytest.mark.asyncio
+async def test_get_payments(client, from_wallet, adminkey_headers_from):
+    # Because sqlite only stores timestamps with milliseconds we have to wait a second to ensure
+    # a different timestamp than previous invoices
+    # due to this limitation both payments (normal and paginated) are tested at the same time as they are almost
+    # identical anyways
+    if DB_TYPE == SQLITE:
+        await asyncio.sleep(1)
+    ts = time()
+
+    fake_data = [
+        CreateInvoiceData(amount=10, memo="aaaa"),
+        CreateInvoiceData(amount=100, memo="bbbb"),
+        CreateInvoiceData(amount=1000, memo="aabb"),
+    ]
+
+    for invoice in fake_data:
+        await api_payments_create_invoice(invoice, from_wallet)
+
+    async def get_payments(params: dict):
+        params["time[ge]"] = ts
+        response = await client.get(
+            "/api/v1/payments",
+            params=params,
+            headers=adminkey_headers_from,
+        )
+        assert response.status_code == 200
+        return [Payment(**payment) for payment in response.json()]
+
+    payments = await get_payments({"sortby": "amount", "direction": "desc", "limit": 2})
+    assert payments[-1].amount < payments[0].amount
+    assert len(payments) == 2
+
+    payments = await get_payments({"offset": 2, "limit": 2})
+    assert len(payments) == 1
+
+    payments = await get_payments({"sortby": "amount", "direction": "asc"})
+    assert payments[-1].amount > payments[0].amount
+
+    payments = await get_payments({"search": "aaa"})
+    assert len(payments) == 1
+
+    payments = await get_payments({"search": "aa"})
+    assert len(payments) == 2
+
+    # amount is in msat
+    payments = await get_payments({"amount[gt]": 10000})
+    assert len(payments) == 2
+
+    response = await client.get(
+        "/api/v1/payments/paginated",
+        params={"limit": 2, "time[ge]": ts},
+        headers=adminkey_headers_from,
+    )
+    assert response.status_code == 200
+    paginated = response.json()
+    assert len(paginated["data"]) == 2
+    assert paginated["total"] == len(fake_data)
+
+
 # check POST /api/v1/payments/decode
 @pytest.mark.asyncio
 async def test_decode_invoice(client, invoice):