"]
+
+[tool.poetry.dependencies]
+python = "^3.10 | ^3.9"
+lnbits = {version = "*", allow-prereleases = true}
+
+[tool.poetry.group.dev.dependencies]
+black = "^24.3.0"
+pytest-asyncio = "^0.21.0"
+pytest = "^7.3.2"
+mypy = "^1.5.1"
+pre-commit = "^3.2.2"
+ruff = "^0.3.2"
+types-python-crontab = "^3.2.0.20240703"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.mypy]
+exclude = "(nostr/*)"
+[[tool.mypy.overrides]]
+module = [
+ "lnbits.*",
+ "lnurl.*",
+ "loguru.*",
+ "fastapi.*",
+ "pydantic.*",
+ "pyqrcode.*",
+ "shortuuid.*",
+ "httpx.*",
+]
+ignore_missing_imports = true
+
+[tool.pytest.ini_options]
+log_cli = false
+testpaths = [
+ "tests"
+]
+
+[tool.black]
+line-length = 88
+
+[tool.ruff]
+# Same line length as Black (88).
+line-length = 88
+exclude = [
+ "nostr",
+]
+
+[tool.ruff.lint]
+# Enable:
+# F - pyflakes
+# E - pycodestyle errors
+# W - pycodestyle warnings
+# I - isort
+# A - flake8-builtins
+# C - mccabe
+# N - naming
+# UP - pyupgrade
+# RUF - ruff
+# B - bugbear
+select = ["F", "E", "W", "I", "A", "C", "N", "UP", "RUF", "B"]
+ignore = ["C901"]
+
+# Allow autofix for all enabled rules (when `--fix` is provided).
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+# needed for pydantic
+[tool.ruff.lint.pep8-naming]
+classmethod-decorators = [
+ "validator",
+ "root_validator",
+]
+
+# Ignore unused imports in __init__.py files.
+# [tool.ruff.lint.extend-per-file-ignores]
+# "__init__.py" = ["F401", "F403"]
+
+# [tool.ruff.lint.mccabe]
+# max-complexity = 10
+
+[tool.ruff.lint.flake8-bugbear]
+# Allow default arguments, e.g., `data: List[str] = fastapi.Query(None)`.
+extend-immutable-calls = [
+ "fastapi.Depends",
+ "fastapi.Query",
+ "lnbits.decorators.parse_filters"
+]
diff --git a/run_cron_job.py b/run_cron_job.py
index 39e6584..593a291 100644
--- a/run_cron_job.py
+++ b/run_cron_job.py
@@ -1,66 +1,76 @@
import asyncio
-import httpx
-import os
+import datetime as dt
import json
import logging
import logging.handlers
-import datetime as dt
+import os
+from typing import Optional
+
+import httpx
dir_path = os.path.dirname(os.path.realpath(__file__))
-logname = os.path.join(dir_path, 'scheduler.log')
+logname = os.path.join(dir_path, "scheduler.log")
-logger = logging.getLogger('scheduler')
+logger = logging.getLogger("scheduler")
logger.setLevel(logging.DEBUG)
-handler = logging.FileHandler(filename=logname, encoding='utf-8', mode='a')
-dt_fmt = '%Y-%m-%d %H:%M:%S'
-formatter = logging.Formatter('[{asctime}] [{levelname}] {name}: {message}', dt_fmt, style='{')
+handler = logging.FileHandler(filename=logname, encoding="utf-8", mode="a")
+dt_fmt = "%Y-%m-%d %H:%M:%S"
+formatter = logging.Formatter(
+ "[{asctime}] [{levelname}] {name}: {message}", dt_fmt, style="{"
+)
handler.setFormatter(formatter)
logger.addHandler(handler)
-LNBITS_BASE_URL = os.environ.get('BASE_URL') or 'http://localhost:5000'
+LNBITS_BASE_URL = os.environ.get("BASE_URL") or "http://localhost:5000"
+
-async def save_job_execution(response: str, jobID: str, adminkey: str) -> None:
- '''
- Saves job execution to both db and to a logfile.
- We can decide if we want to prioritize either later
- Note: We are logging everything to a single file for now, but
- individual rows in db.
- '''
+async def save_job_execution(
+ response: httpx.Response, job_id: str, adminkey: str
+) -> None:
+ """
+ Saves job execution to both db and to a logfile.
+ We can decide if we want to prioritize either later
+ Note: We are logging everything to a single file for now, but
+ individual rows in db.
+ """
try:
# print(f' inside save_job_execution now ')
if response.status_code == 200:
- logger.info(f"jobID: {jobID}, status_code: {response.status_code}")
- # logger.info(f'jobID: {jobID}, response text: {response.text}')
+ logger.info(f"job_id: {job_id}, status_code: {response.status_code}")
+ # logger.info(f'job_id: {job_id}, response text: {response.text}')
- url = f'{LNBITS_BASE_URL}/scheduler/api/v1/logentry'
-
- logger.info(f'pushdb: response.status type: {type(response.status_code)}')
- logger.info(f'pushdb: response.text type: {type(response.text)}')
+ url = f"{LNBITS_BASE_URL}/scheduler/api/v1/logentry"
+
+ logger.info(f"pushdb: response.status type: {type(response.status_code)}")
+ logger.info(f"pushdb: response.text type: {type(response.text)}")
# we have some difficulty saving response.text to db, unicode?
- data = {'job_id': jobID,
- 'status': str(response.status_code),
- # 'response': 'sample text', # str(response.text),
- 'response': response.text,
- 'timestamp': dt.datetime.now().strftime('%Y-%m-%d %H:%M:%S') }
-
- logger.info(f'pushdb: now pushing execution data to the database for jobID: {jobID}')
- logger.info(f'pushdb: calling api : {url} with params: {data}')
-
+ data = {
+ "job_id": job_id,
+ "status": str(response.status_code),
+ # 'response': 'sample text', # str(response.text),
+ "response": response.text,
+ "timestamp": dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+ }
+
+ logger.info(
+ "pushdb: now pushing execution data to "
+ f"the database for job_id: {job_id}"
+ )
+ logger.info(f"pushdb: calling api : {url} with params: {data}")
+
pushdb_response = httpx.post(
- url=url,
- headers={"X-Api-Key": adminkey},
- json=data
+ url=url, headers={"X-Api-Key": adminkey}, json=data
+ )
+ logger.info(
+ f"SaveJobExecution: pushdb status: {pushdb_response.status_code}"
)
- logger.info(f'SaveJobExecution: pushdb status: {pushdb_response.status_code}')
# logger.info(f'SaveJobExecution: pushdb text: {pushdb_response.text}')
if pushdb_response.status_code == 200:
- logger.info(f'success: saved results to db for jobID: {jobID}')
- return True
+ logger.info(f"success: saved results to db for job_id: {job_id}")
except Exception as e:
- logger.error(f"error, saving to database for jobID: {jobID}")
+ logger.error(f"error, saving to database for job_id: {job_id}")
logger.error(e)
- return False
async def process_json_body(request_body):
@@ -76,16 +86,16 @@ async def process_json_body(request_body):
return {}
-async def call_api(method_name, url, headers, body):
- '''
- Call API with parameters from database,
- assume body, headers is a string from the db
- this method called from run_cron_job.py for job execution
- '''
- http_verbs = ['get', 'post', 'put', 'delete']
+async def call_api(method_name, url, headers, body) -> Optional[httpx.Response]:
+ """
+ Call API with parameters from database,
+ assume body, headers is a string from the db
+    this method is called from run_cron_job.py for job execution
+ """
+ http_verbs = ["get", "post", "put", "delete"]
try:
- body_json = {}
+ body_json: dict = {}
if body is None:
body_json = {}
elif len(body) > 0:
@@ -94,47 +104,55 @@ async def call_api(method_name, url, headers, body):
if method_name.lower() in http_verbs:
method_to_call = getattr(httpx, method_name.lower())
- if method_name.lower() in ['get', 'delete'] and body_json is not None:
+ response = None
+ if method_name.lower() in ["get", "delete"] and body_json is not None:
response = method_to_call(url, headers=headers, params=body_json)
- elif method_name.lower() in ['post', 'put']:
+ elif method_name.lower() in ["post", "put"]:
response = method_to_call(url, headers=headers, json=body_json)
- logger.info(f'[run_cron_job]: call_api response status: {response.status_code}')
- logger.info(f'[run_cron_job]: call_api response text: {response.text}')
+ assert response, "response is None"
+ logger.info(
+ f"[run_cron_job]: call_api response status: {response.status_code}"
+ )
+ logger.info(f"[run_cron_job]: call_api response text: {response.text}")
return response
else:
- logger.info(f'Invalid method name: {method_name}')
+ logger.info(f"Invalid method name: {method_name}")
- except json.JSONDecodeError as e:
- logger.info(f'body json decode error: {e}')
- raise e
+ except json.JSONDecodeError as exc:
+ logger.info(f"body json decode error: {exc!s}")
+ raise exc
+ return None
-async def get_job_by_id(jobID: str, adminkey: str):
- '''
- Gets job by jobID from API, as this script run by cron
- doesn't have access to entire lnbits environment
- '''
+async def get_job_by_id(job_id: str, adminkey: str):
+ """
+ Gets job by job_id from API, as this script run by cron
+ doesn't have access to entire lnbits environment
+ """
try:
- url = f'{LNBITS_BASE_URL}/scheduler/api/v1/jobs/{jobID}'
+ url = f"{LNBITS_BASE_URL}/scheduler/api/v1/jobs/{job_id}"
- response = httpx.get(
- url=url,
- headers={"X-Api-Key": adminkey}
+ response = httpx.get(url=url, headers={"X-Api-Key": adminkey})
+ logger.info(
+ f"[get_job_by_id]: response items in get_job_by_id: {response.text}\n"
)
- logger.info(f"[get_job_by_id]: response items in get_job_by_id: {response.text}\n")
items = json.loads(response.text)
return items
except Exception as e:
- logger.error(f'[get_job_by_id]: exception thrown: {e}')
- logger.error(f'[get_job_by_id]: Error trying to fetch data from db, check is LNBITS server running?: {e}')
+ logger.error(f"[get_job_by_id]: exception thrown: {e}")
+ logger.error(
+ "[get_job_by_id]: Error trying to fetch data from db, "
+ f"check is LNBITS server running?: {e}"
+ )
-async def clear_log_file(logname: str) -> bool:
- '''
- Clears the log file by deleting the file on disk
- '''
+
+async def clear_log_file(logname: str) -> bool:
+ """
+ Clears the log file by deleting the file on disk
+ """
status = True
- try:
+ try:
os.remove(logname)
return status
except Exception as e:
@@ -149,48 +167,55 @@ async def check_logfile(logfile: str) -> None:
logger.info(f"[check_logfile]: The file {logfile} exists.")
else:
# Create the file
- with open(logfile, 'w') as file:
- now = dt.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+ with open(logfile, "w") as file:
+ now = dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
file.write(f"[{now}][check_logfile]: This is a new scheduler logfile.")
+
async def main() -> None:
- '''
- The main method that is run when the run_cron_job.py is executed
- It will get jobID from the environment and query the DB
- for the http verb, url, headers, and data. Then it will execute
- the API call and log the result.
-
- example data:
- headers = [], default value
- body = None, default value
- [{"key":"X-Api-Key","value":"0b2569190e2f4b"}]
- '''
+ """
+ The main method that is run when the run_cron_job.py is executed
+ It will get job_id from the environment and query the DB
+ for the http verb, url, headers, and data. Then it will execute
+ the API call and log the result.
+
+ example data:
+ headers = [], default value
+ body = None, default value
+ [{"key":"X-Api-Key","value":"0b2569190e2f4b"}]
+ """
try:
await check_logfile(logname)
- logger.info('[run_cron_job]: LNBITS_BASE_URL = %s', LNBITS_BASE_URL)
- jobID = os.environ.get('ID')
- adminkey = os.environ.get('adminkey')
- logger.info('[run_cron_job]: jobID: %s adminkey: %s', jobID, adminkey)
+ logger.info("[run_cron_job]: LNBITS_BASE_URL = %s", LNBITS_BASE_URL)
+ job_id = os.environ.get("ID")
+ assert job_id, "job_id not found in environment variables"
+ adminkey = os.environ.get("adminkey")
+ assert adminkey, "adminkey not found in environment variables"
+ logger.info("[run_cron_job]: job_id: %s adminkey: %s", job_id, adminkey)
- job = await get_job_by_id(jobID, adminkey)
- method_name = job['selectedverb']
- url = job['url']
- headers = job['headers']
- body = job['body']
+ job = await get_job_by_id(job_id, adminkey)
+ assert job, "job not found in database"
+ method_name = job["selectedverb"]
+ url = job["url"]
+ headers = job["headers"]
+ body = job["body"]
json_headers = {}
- for h in headers:
- json_headers.update({h['key']: h['value']})
+ for h in headers:
+ json_headers.update({h["key"]: h["value"]})
response = await call_api(method_name, url, json_headers, body)
- logger.info(f'[run_cron_job]: response status from api call: {response.status_code}')
- logger.info(f'response text from api call: {response.text}')
+ assert response, "response is None"
+ logger.info(
+ f"[run_cron_job]: response status from api call: {response.status_code}"
+ )
+ logger.info(f"response text from api call: {response.text}")
- await save_job_execution(response=response, jobID=jobID, adminkey=adminkey)
+ await save_job_execution(response=response, job_id=job_id, adminkey=adminkey)
except Exception as e:
- logger.error(f'exception thrown in main() run_cron_job: {e}')
+ logger.error(f"exception thrown in main() run_cron_job: {e}")
asyncio.run(main())
diff --git a/static/js/index.js b/static/js/index.js
new file mode 100644
index 0000000..4707607
--- /dev/null
+++ b/static/js/index.js
@@ -0,0 +1,558 @@
+const mapcrontabs = function (obj) {
+ obj.date = Quasar.date.formatDate(new Date(obj.time), 'YYYY-MM-DD HH:mm')
+ obj.fsat = new Intl.NumberFormat(LOCALE).format(obj.amount)
+ obj.walllink = ['../wallet?usr=', obj.user, '&wal=', obj.id].join('')
+ obj._data = _.clone(obj)
+ return obj
+}
+
+window.app = Vue.createApp({
+ el: '#vue',
+ mixins: [windowMixin],
+ data() {
+ return {
+ testlogData: 'All test log content',
+ fileData: 'All Logfile content', // for log file data
+ output: 'Individual Job Logs',
+ job_name: '', // for the create job dialog
+ httpVerbs: ['GET', 'PUT', 'POST', 'DELETE'],
+ selectedverb: 'GET',
+ url: '',
+ body: '',
+ status: '',
+ wallets: [],
+ jobs: [],
+ shortcuts: [
+ '@reboot',
+ '@hourly',
+ '@daily',
+ '@weekly',
+ '@monthly',
+ '@yearly'
+ ],
+ slots: ['minute', 'hour', 'day', 'month', 'weekday'],
+ cron: {
+ minute: '*',
+ hour: '*',
+ day: '*',
+ month: '*',
+ weekday: '*'
+ },
+ jobsTable: {
+ columns: [
+ {name: 'id', align: 'left', label: 'ID', field: 'id'},
+ {
+ name: 'name',
+ align: 'left',
+ label: 'Job Name',
+ field: 'name'
+ },
+ {
+ name: 'status',
+ align: 'left',
+ label: 'Is Running?',
+ field: 'status'
+ },
+ {
+ name: 'schedule',
+ align: 'left',
+ label: 'Schedule',
+ field: 'schedule'
+ }
+ ],
+ pagination: {
+ rowsPerPage: 10
+ }
+ },
+ logDialog: {
+ show: false
+ },
+ testlogDialog: {
+ show: false,
+ output: '',
+ id: ''
+ },
+ id_code: '',
+ jobLogDialog: {
+ show: false,
+ output: '',
+ id: ''
+ },
+ jobDialog: {
+ show: false,
+ cron: {
+ minute: '*',
+ hour: '*',
+ day: '*',
+ month: '*',
+ weekday: '*'
+ },
+ data: {
+ selectedverb: 'GET',
+ schedule: '* * * * *',
+ status: 'false'
+ },
+ headers: []
+ },
+ jobStatus: {
+ show: false,
+ data: {}
+ }
+ }
+ },
+ computed: {
+ userOptions() {
+ return this.jobs.map(function (obj) {
+ //console.log(obj.id)
+ return {
+ value: String(obj.id),
+ label: String(obj.id)
+ }
+ })
+ },
+ concatenatedString() {
+ // do some validation checking here for cron string
+ return `${this.cron.minute} ${this.cron.hour} ${this.cron.day} ${this.cron.month} ${this.cron.weekday}`
+ }
+ },
+ watch: {
+ concatenatedString(newValue) {
+ // for cron string
+ this.jobDialog.data.schedule = newValue.trim()
+ }
+ },
+ methods: {
+ ///////////////Jobs////////////////////////////
+ getJobs() {
+ LNbits.api
+ .request(
+ 'GET',
+ '/scheduler/api/v1/jobs',
+ this.g.user.wallets[0].adminkey
+ )
+ .then(response => {
+ this.jobs = response.data.map(function (obj) {
+ return mapcrontabs(obj)
+ })
+ // console.log("[index] getjobs: ", JSON.stringify(this.jobs))
+ })
+ },
+ openLogDialog: function (linkId) {
+ this.jobLogDialog.show = true
+ const link = _.findWhere(this.jobs, {id: linkId})
+ this.jobLogDialog.id = _.clone(link._data.id)
+ this.id_code = this.jobLogDialog.id
+ },
+ openTestlogDialog: function (linkId) {
+ this.testlogDialog.show = true
+ const link = _.findWhere(this.jobs, {id: linkId})
+ this.testlogDialog.id = _.clone(link._data.id)
+ this.id_code = this.testlogDialog.id
+ },
+    fetchJobLogDialog: function () {
+      const id = this.jobLogDialog.id
+ //console.log("this.jobLogDialog.id ", this.jobLogDialog.id)
+ //console.log("fetch job Log Dialog: ", id)
+ LNbits.api
+ .request(
+ 'GET',
+ '/scheduler/api/v1/logentry/' + id,
+ this.g.user.wallets[0].adminkey
+ )
+ .then(response => {
+ //console.log("fetch job Log Dialog: ", id)
+ //console.log(JSON.stringify(response))
+ //console.log(response.status)
+ this.output = response.data
+ this.id_code = id
+ })
+ .catch(function (error) {
+ LNbits.utils.notifyApiError(error)
+ })
+ },
+ clearJobLogDialog: function () {
+ this.output = ''
+ },
+ closeJobLogDialog: function () {
+ this.output = '' // clears the dialog content on close
+ this.jobLogDialog.show = false
+ },
+ openJobUpdateDialog(linkId) {
+ const link = _.findWhere(this.jobs, {id: linkId})
+ this.jobDialog.data = _.clone(link._data)
+ if (link._data.headers === null || link._data.headers === undefined) {
+ // console.log("[index] open Job Update Dialog: headers is null or undefined")
+ this.jobDialog.headers = []
+ } else {
+ this.jobDialog.headers = _.clone(link._data.headers)
+ }
+ if (link._data.schedule) {
+ let [minute, hour, day, month, weekday] =
+ this.jobDialog.data.schedule.split(' ')
+ this.cron.minute = minute
+ this.cron.hour = hour
+ this.cron.day = day
+ this.cron.month = month
+ this.cron.weekday = weekday
+ //console.log("this.cron.minute", this.cron.minute)
+ }
+
+ this.jobDialog.show = true
+ },
+ sendJobFormData() {
+ console.log(
+ '[index] sendJobFormData headers: ',
+ JSON.stringify(this.jobDialog.headers, null, 2)
+ )
+ // console.log('sendJobFormData headers: ', this.jobDialog.headers)
+
+ const data = {
+ id: this.jobDialog.data.id,
+ name: this.jobDialog.data.name,
+ status: this.jobDialog.data.status,
+ selectedverb: this.jobDialog.data.selectedverb,
+ url: this.jobDialog.data.url,
+ headers: this.jobDialog.headers,
+ body: this.jobDialog.data.body,
+ schedule: this.jobDialog.data.schedule,
+ extra: {}
+ }
+
+ if (this.jobDialog.data.id) {
+ console.log('[index] sendJobFormData: update job data')
+ console.log(data)
+ console.log(
+ '[index] sendJobFormData: headers: ',
+ JSON.stringify(data.headers)
+ )
+ this.updateJob(data)
+ } else {
+ console.log('[index] sendJobFormData: create new job entry')
+ console.log(data)
+ this.createJob(data)
+ }
+ },
+ displayTestJobData(job_id) {
+ // for test log data
+ // console.log("job id: ", job_id)
+ LNbits.api
+ .request(
+ 'GET',
+ '/scheduler/api/v1/test_log/' + job_id,
+ this.g.user.wallets[0].adminkey
+ )
+ .then(response => {
+ //console.log("response.data: ", response.data)
+ this.testlogData = response.data
+ })
+ .catch(function (error) {
+ LNbits.utils.notifyApiError(error)
+ })
+ },
+ clearTestDialog: function () {
+ // for test log data
+ this.testlogDialog.show = false
+ this.testlogData = ''
+ },
+ displayFileData() {
+ // for complete log data
+ LNbits.api
+ .request(
+ 'GET',
+ '/scheduler/api/v1/complete_log',
+ this.g.user.wallets[0].adminkey
+ )
+ .then(response => {
+ this.fileData = response.data
+ })
+ .catch(function (error) {
+ LNbits.utils.notifyApiError(error)
+ })
+ },
+ clearLogDialog: function () {
+ // for complete log data
+ this.logDialog.show = false
+ // this.fileData = ''
+ },
+ deleteLog: function () {
+ // for complete log data
+ LNbits.api
+ .request(
+ 'POST',
+ '/scheduler/api/v1/delete_log',
+ this.g.user.wallets[0].adminkey
+ )
+ .then(response => {
+ if (response.status == 200) {
+ this.fileData = ''
+ }
+ })
+ .catch(function (error) {
+ LNbits.utils.notifyApiError(error)
+ })
+ },
+ cancelEdit: function () {
+ this.jobDialog.show = false
+ this.jobDialog.headers = []
+      const default_data = {
+ selectedverb: 'GET',
+ schedule: '* * * * *',
+ status: 'false',
+ name: '',
+ url: '',
+ body: ''
+ }
+ this.jobDialog.data = default_data
+ this.cron = {
+ minute: '*',
+ hour: '*',
+ day: '*',
+ month: '*',
+ weekday: '*'
+ }
+ },
+ updateJob(data) {
+ LNbits.api
+ .request(
+ 'PUT',
+ '/scheduler/api/v1/jobs/' + data.id,
+ this.g.user.wallets[0].adminkey,
+ data
+ )
+ .then(response => {
+ // this.jobs.push(mapcrontabs(response.data))
+ this.jobDialog.show = false
+ this.jobDialog.data = {}
+ this.jobDialog.headers = []
+ data = {
+ selectedverb: 'GET',
+ schedule: '* * * * *',
+ status: 'false'
+ }
+ this.cron = {
+ minute: '*',
+ hour: '*',
+ day: '*',
+ month: '*',
+ weekday: '*'
+ }
+
+ this.getJobs()
+ })
+ .catch(function (error) {
+ LNbits.utils.notifyApiError(error)
+ })
+ },
+ createJob(data) {
+ LNbits.api
+ .request(
+ 'POST',
+ '/scheduler/api/v1/jobs',
+ this.g.user.wallets[0].adminkey,
+ data
+ )
+ .then(response => {
+ this.jobs.push(mapcrontabs(response.data))
+ //console.log("[index] createJob response: ", response.text)
+ // console.log("[index] createJob: this.jobs ", JSON.stringify(this.jobs))
+
+ this.jobDialog.show = false
+ this.jobDialog.data = {}
+ this.jobDialog.headers = []
+ data = {
+ selectedverb: 'GET',
+ schedule: '* * * * *',
+ status: 'false'
+ }
+ this.cron = {
+ minute: '*',
+ hour: '*',
+ day: '*',
+ month: '*',
+ weekday: '*'
+ }
+ })
+ .catch(function (error) {
+ LNbits.utils.notifyApiError(error)
+ })
+ },
+ deleteJob(jobId) {
+ LNbits.utils
+ .confirmDialog('Are you sure you want to delete this Job?')
+        .onOk(() => {
+ LNbits.api
+ .request(
+ 'DELETE',
+ '/scheduler/api/v1/jobs/' + jobId,
+ this.g.user.wallets[0].adminkey
+ )
+ .then(response => {
+ this.jobs = _.reject(this.jobs, function (obj) {
+ return obj.id == jobId
+ })
+ })
+ .catch(function (error) {
+ LNbits.utils.notifyApiError(error)
+ })
+ })
+ },
+ toggleJobsStatus(id) {
+ for (let i = 0; i < this.jobs.length; i++) {
+ if (this.jobs[i].id === id) {
+ const newstatus = this.jobs[i].status
+ if (newstatus === true) {
+ this.jobs[i].status = false
+ } else {
+ this.jobs[i].status = true
+ }
+ console.log(
+ 'jobs id: ',
+ this.jobs[i].id,
+ 'status:',
+ this.jobs[i].status
+ )
+ //break; // Stop the loop once the target object is found
+ return this.jobs[i].status
+ }
+ }
+ },
+ lookupJobsStatus(id) {
+ for (let i = 0; i < this.jobs.length; i++) {
+ if (this.jobs[i].id === id) {
+ return this.jobs[i].status
+ }
+ }
+ },
+ toggleButton(id) {
+ const lookup_state = this.lookupJobsStatus(id)
+ //console.log("lookup: ", lookup_state)
+ //console.log("opposite: ", !lookup_state)
+ const result = this.pauseJob(id, !lookup_state)
+ //console.log("result: ", result)
+ },
+ getButtonIcon(id) {
+ const lookup_state = this.lookupJobsStatus(id)
+ return lookup_state ? 'stop' : 'play_arrow'
+ },
+ getButtonText(id) {
+ const lookup_state = this.lookupJobsStatus(id)
+ return lookup_state ? 'Stop' : 'Play'
+ },
+ getButtonColor(id) {
+ const lookup_state = this.lookupJobsStatus(id)
+ return lookup_state ? 'red' : 'green'
+ },
+ pauseJob(jobId, status) {
+ let confirm_msg = 'Stopping, Are you sure?' // stop
+ if (status) {
+ confirm_msg = 'Are you sure you want to Start?'
+ }
+      LNbits.utils.confirmDialog(confirm_msg).onOk(() => {
+ LNbits.api
+ .request(
+ 'POST',
+ '/scheduler/api/v1/pause/' + jobId + '/' + status,
+ this.g.user.wallets[0].adminkey
+ )
+ .then(response => {
+ //console.log("Pause Response status", response.status);
+ const toggle_state = this.toggleJobsStatus(jobId)
+ })
+ .catch(function (error) {
+ LNbits.utils.notifyApiError(error)
+ })
+ })
+ },
+ flattenExportCSV(columns, data, fileName) {
+ const flattenNestedData = function (data, field) {
+ if (typeof field === 'function') {
+ return field(data)
+ } else if (field === 'headers') {
+ return JSON.stringify(data['headers'])
+ } else if (field === 'body') {
+ return JSON.stringify(data['body'])
+ } else if (typeof field === 'object') {
+ return Object.keys(field)
+ .map(key => `${key}: ${field[key]}`)
+ .join(', ')
+ } else {
+ return field.split('.').reduce((obj, key) => obj[key], data)
+ }
+ }
+
+ const wrapCsvValue = function (val, formatFn) {
+        let formatted = formatFn !== void 0 ? formatFn(val) : val
+
+ formatted =
+ formatted === void 0 || formatted === null ? '' : String(formatted)
+
+ formatted = formatted.split('"').join('""')
+
+ return `"${formatted}"`
+ }
+
+ const content = [
+ columns.map(function (col) {
+ return wrapCsvValue(col.label)
+ })
+ ]
+ .concat(
+ data.map(function (row) {
+ return columns
+ .map(function (col) {
+ const fieldValue = flattenNestedData(row, col.field)
+ return wrapCsvValue(fieldValue, col.format)
+ })
+ .join(',')
+ })
+ )
+ .join('\r\n')
+
+ const status = Quasar.utils.exportFile(
+ `${fileName || 'table-export'}.csv`,
+ content,
+ 'text/csv'
+ )
+
+ if (status !== true) {
+ Quasar.plugins.Notify.create({
+ message: 'Browser denied file download...',
+ color: 'negative',
+ icon: null
+ })
+ }
+ },
+ exportJobsCSV() {
+ //LNbits.utils.exportCSV(this.jobsTable.columns, this.jobs)
+ LNbits.api
+ .request(
+ 'GET',
+ '/scheduler/api/v1/jobs',
+ this.g.user.wallets[0].adminkey
+ )
+ .then(response => {
+ if (response.status == 200) {
+ let data = response.data
+ // Dynamically generate columns based on keys in the data
+ let columns = Object.keys(data[0]).map(key => ({
+ label: key.charAt(0).toUpperCase() + key.slice(1),
+ field: key
+ }))
+ this.flattenExportCSV(columns, data, 'scheduler-export')
+ }
+ })
+ .catch(LNbits.utils.notifyApiError)
+ }
+ },
+ props: {
+ row: {
+ type: Object,
+ default: () => ({id: null}) // Define a default row object if none is provided
+ }
+ },
+ created() {
+ if (this.g.user.wallets.length) {
+ this.getJobs()
+ }
+ }
+})
diff --git a/templates/scheduler/_api_docs.html b/templates/scheduler/_api_docs.html
index 6c27966..acc02fe 100644
--- a/templates/scheduler/_api_docs.html
+++ b/templates/scheduler/_api_docs.html
@@ -10,23 +10,21 @@
Scheduler Extension: Make and manage scheduled events on LNBits
- Call any REST API on a schedule you decide. This extension allows
- the creation and management of timed processes via cron tabs.
- Need help making a schedule? visit
- https://crontab.guru.
-
- Note: The scheduler.log file is saved in the extension data folder and contains
- extra messages for debugging purposes. You can also watch it by running on the cli:
- `tail -f ~/lnbits/lnbits/extensions/scheduler/scheduler.log`.
- The individual job logging data displayed by the
-
-
- icon is saved into the database.
+ Call any REST API on a schedule you decide. This extension allows the
+ creation and management of timed processes via cron tabs. Need help
+ making a schedule? visit
+ https://crontab.guru.
+
+ Note: The scheduler.log file is saved in the extension data folder and
+ contains extra messages for debugging purposes. You can also watch it by
+ running on the cli: `tail -f
+ ~/lnbits/lnbits/extensions/scheduler/scheduler.log`. The individual job
+ logging data displayed by the
+
+
+ icon is saved into the database.
Created by,
@@ -45,10 +43,4 @@
:content-inset-level="0.5"
>
-
-
-
-
-
-
diff --git a/templates/scheduler/index.html b/templates/scheduler/index.html
index 7f660cd..c115a4a 100644
--- a/templates/scheduler/index.html
+++ b/templates/scheduler/index.html
@@ -1,5 +1,7 @@
{% extends "base.html" %} {% from "macros.jinja" import window_vars with context
-%} {% block page %}
+%} {% block scripts %} {{ window_vars(user) }}
+
+{% endblock %} {% block page %}
@@ -28,25 +30,27 @@ Scheduled Jobs List
- {% raw %}
-
+
-
- {{ col.label }}
-
+
-
-
+ Scheduled Jobs List
:icon="getButtonIcon(props.row.id)"
:color="getButtonColor(props.row.id)"
>
- {{ getButtonText(props.row.id) }}
+
Scheduled Jobs List
Edit
- Scheduled Jobs List
Test the Job Setting
- Scheduled Jobs List
>
Log Details for this Job
-
-
- {{ col.value }}
-
+
Scheduled Jobs List
@click="deleteJob(props.row.id)"
icon="cancel"
color="pink"
- >Delete
+ >Delete
- {% endraw %}
-
-
@@ -129,92 +134,132 @@
-
- Check your details are right with a test run
- Current Job ID: {% raw %} {{id_code}}{% endraw %}
+
+
+ Check your details are right with a test run
+
+ Current Job ID:
-
-
- Test Run
-
- Close
-
-
+
+
+ Test Run
+
+ Close
+
+
-
-
-
- If you need more detail on error conditions, visit "View All Logs".
- This file can also be found at ~/lnbits/lnbits/extensions/scheduler/test_run_job.log
-
+ filled
+ v-model.trim="testlogData"
+ label="test log content"
+ type="textarea"
+ rows="15"
+ />
+
+
+
+
+ If you need more detail on error conditions, visit "View All Logs".
+ This file can also be found at
+ ~/lnbits/lnbits/extensions/scheduler/test_run_job.log
+
+
+
-
-
-
-
-
- Scheduler.log (last 1000 lines)
- This file can also be found at ~/lnbits/lnbits/extensions/scheduler/scheduler.log
-
-
-
- Fetch data
-
- Close
-
+
+
+
+
+ Scheduler.log (last 1000 lines)
+
+
+ This file can also be found at
+ ~/lnbits/lnbits/extensions/scheduler/scheduler.log
+
+
+
+
+ Fetch data
+
+ Close
+
-
-
-
-
-
- Delete File Log
-
-
-
-
+
+
+
+
+
+ Delete File Log
+
+
+
+
+
+
+
+
+ Individual Job Log:
+
+
+
+
+ Fetch data
+
+ Clear
+ Close
+
-
-
-
- Individual Job Log: {% raw %} {{id_code}}{% endraw %}
+
+
+
+
-
-
- Fetch data
-
- Clear
- Close
-
-
-
-
-
-
-
-
-
+
@@ -226,25 +271,30 @@ Individual Job Log: {% raw %} {{id_code}}{%
label="Job Name *"
>
-
+
+ v-model.trim="jobDialog.data.selectedverb"
+ :options="httpVerbs"
+ dense
+ filled
+ />
-
-
- Headers
- Example:
- "X-Api-Key: 0b2569190e2f4bfc90a7ae25aa194518"
-
+
+ Headers
+
+ Example: "X-Api-Key: 0b2569190e2f4bfc90a7ae25aa194518"
+
+
Headers
v-for="(item, index) in jobDialog.headers"
:key="index"
>
-
-
-
-
-
-
-
-
-
-
-
-
- Add Header Entry
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+ Add Header Entry
+
+
+
Body
-
+
Schedule *
- Visit https://crontab.guru for expression help.
+
+ Visit
+ https://crontab.guru
+ for expression help.
+
@@ -317,566 +376,20 @@ Schedule *
dense
disable
v-model.trim="jobDialog.data.schedule"
- label="schedule"
+ label="schedule"
>
+ Submit
SubmitCancel
- Cancel
-{% endblock %} {% block scripts %} {{ window_vars(user) }}
-
{% endblock %}
diff --git a/test_run_job.py b/test_run_job.py
index 7a3b42b..32d1908 100644
--- a/test_run_job.py
+++ b/test_run_job.py
@@ -1,46 +1,55 @@
-import os
+import json
import logging
import logging.handlers
+import os
+
import httpx
from .crud import (
get_scheduler_job,
)
+
dir_path = os.path.dirname(os.path.realpath(__file__))
-logname = os.path.join(dir_path, 'test_run_job.log')
-logger = logging.getLogger('scheduler testlog')
+logname = os.path.join(dir_path, "test_run_job.log")
+logger = logging.getLogger("scheduler testlog")
logger.setLevel(logging.DEBUG)
-handler = logging.FileHandler(filename=logname, encoding='utf-8', mode='a')
-dt_fmt = '%Y-%m-%d %H:%M:%S'
-formatter = logging.Formatter('[{asctime}] [{levelname}] {name}: {message}', dt_fmt, style='{')
+handler = logging.FileHandler(filename=logname, encoding="utf-8", mode="a")
+dt_fmt = "%Y-%m-%d %H:%M:%S"
+formatter = logging.Formatter(
+ "[{asctime}] [{levelname}] {name}: {message}", dt_fmt, style="{"
+)
handler.setFormatter(formatter)
logger.addHandler(handler)
-async def test_job(job_id: str, adminkey: str) -> str:
- '''
- A clone of what is actually run when run_cron_job.py is executed
- This is used to execute the API call and log the result.
- '''
+
+async def test_job(job_id: str) -> str:
+ """
+ A clone of what is actually run when run_cron_job.py is executed
+ This is used to execute the API call and log the result.
+ """
try:
# print(f'[test_run_job]: jobid: {job_id} adminkey: {adminkey}')
jobinfo = await get_scheduler_job(job_id)
+ assert jobinfo, "Job not found"
# print(f'[test_run_job]: get scheduler job created: {jobinfo}')
- body_json = {}
+ body_json: dict = {}
json_headers = {}
-
+
method_name = jobinfo.selectedverb
url = jobinfo.url
+ assert url, "No URL found"
body = jobinfo.body
headers = jobinfo.headers
+ assert headers, "No headers found"
if body is None:
body_json = {}
elif len(body) > 0:
- body_json = body
+ body_json = json.loads(body)
# await process_json_body(body)
# print(f' Length of body data {len(body)}')
-
+
for h in headers:
key = h.key
value = h.value
@@ -50,35 +59,59 @@ async def test_job(job_id: str, adminkey: str) -> str:
# print(f'body_json: {body_json}')
# print(f'headers_json: {json_headers}')
- logger.info('[test_run_job]: url: %s headers: %s body: %s', url, json_headers, body_json)
+ logger.info(
+ "[test_run_job]: url: %s headers: %s body: %s", url, json_headers, body_json
+ )
+ response = None
# GET response
- if method_name == 'GET':
+ if method_name == "GET":
async with httpx.AsyncClient() as client:
response = await client.get(url, headers=json_headers, params=body_json)
- logger.info('[test_run_job]: response status from api call: %s', response.status_code)
- logger.info('[test_run_job]: response text from api call: %s', response.text)
+ logger.info(
+ "[test_run_job]: response status from api call: %s",
+ response.status_code,
+ )
+ logger.info(
+ "[test_run_job]: response text from api call: %s", response.text
+ )
# POST response
- elif method_name == 'POST':
+ elif method_name == "POST":
async with httpx.AsyncClient() as client:
response = await client.post(url, headers=json_headers, data=body_json)
- logger.info(f'[test_run_job]: response status from api call: {response.status_code}')
- logger.info(f'[test_run_job]: response text from api call: {response.text}')
+ logger.info(
+ "[test_run_job]: response status from api call: "
+ f"{response.status_code}"
+ )
+ logger.info(
+ f"[test_run_job]: response text from api call: {response.text}"
+ )
# PUT response
- elif method_name == 'PUT':
+ elif method_name == "PUT":
async with httpx.AsyncClient() as client:
response = await client.put(url, headers=json_headers, data=body_json)
- logger.info(f'[test_run_job]: response status from api call: {response.status_code}')
- logger.info(f'[test_run_job]: response text from api call: {response.text}')
+ logger.info(
+ "[test_run_job]: response status from api call: "
+ f"{response.status_code}"
+ )
+ logger.info(
+ f"[test_run_job]: response text from api call: {response.text}"
+ )
# DELETE response
- elif method_name == 'DELETE':
+ elif method_name == "DELETE":
async with httpx.AsyncClient() as client:
response = await client.delete(url, headers=json_headers)
- logger.info(f'[test_run_job]: response status from api call: {response.status_code}')
- logger.info(f'[test_run_job]: response text from api call: {response.text}')
+ logger.info(
+ "[test_run_job]: response status from api call: "
+ f"{response.status_code}"
+ )
+ logger.info(
+ f"[test_run_job]: response text from api call: {response.text}"
+ )
# return "testjob 1234"
+ assert response, "No response from API call"
return response.text
-
+
except Exception as e:
- logger.error('[test_job]:Exception thrown in [test_job]: %s', e)
- return str(e)
\ No newline at end of file
+ logger.error("[test_job]:Exception thrown in [test_job]: %s", e)
+ return str(e)
diff --git a/tests/test_cron_handler.py b/tests/test_cron_handler.py
index 6535db7..da9334c 100644
--- a/tests/test_cron_handler.py
+++ b/tests/test_cron_handler.py
@@ -1,33 +1,35 @@
-
import asyncio
import datetime as dt
import sys
-sys.path.insert(0,'..')
-from cron_handler import CronHandler
-from utils import get_env_data_as_dict
+sys.path.insert(0, "..")
+
import os
+from ..cron_handler import CronHandler
+from ..utils import get_env_data_as_dict
+
## TODO make this a legit pytest
-async def main():
- vars = get_env_data_as_dict('../.env')
- print(vars)
- username = True # vars['SCHEDULER_USER']
- print(f'Scheduler Username: {username}')
+
+async def main():
+ _vars = get_env_data_as_dict("../.env")
+ print(_vars)
+ username = True # vars['SCHEDULER_USER']
+ print(f"Scheduler Username: {username}")
print("testing CronHandler")
- env_vars = {'SHELL': '/usr/bin/bash', 'API_URL': 'http://localhost:8000'}
- id_vars = { 'ID': '23487923847298347928987'}
- id_vars_2 = { 'ID': 'adf098werlkj987'}
-
+ env_vars = {"SHELL": "/usr/bin/bash", "API_URL": "http://localhost:8000"}
+ id_vars = {"ID": "23487923847298347928987"}
+ id_vars_2 = {"ID": "adf098werlkj987"}
+
# unique job id number to be placed in comment
comment = "cron python script"
# echo_comment = "cron now echo"
now = dt.datetime.now()
- print(f'current datetime: {now}')
-
+ print(f"current datetime: {now}")
+
ch = CronHandler(user=username)
# regular cron job with comment
@@ -39,27 +41,35 @@ async def main():
dir_path = os.path.dirname(os.path.realpath(__name__))
command = py_path + f" {dir_path}/cron-job.py"
- response = await ch.new_job(command, "* * * * *", comment=id_vars['ID'], env=id_vars)
+ response = await ch.new_job(
+ command, "* * * * *", comment=id_vars["ID"], env=id_vars
+ )
print(response)
# cron job with env vars with errors redirected to text file
- response = await ch.new_job(f"/Users/bitcarrot/.pyenv/shims/python3 {dir_path}/../log_handler.py >> /tmp/output.txt 2>&1", "* * * * *", comment=id_vars_2['ID'], env=id_vars_2)
+ pypath = "/Users/bitcarrot/.pyenv/shims/python3"
+ response = await ch.new_job(
+ f"{pypath} {dir_path}/../log_handler.py >> /tmp/output.txt 2>&1",
+ "* * * * *",
+ comment=id_vars_2["ID"],
+ env=id_vars_2,
+ )
print(response)
# enable job
print("Enable Job by Comment")
- enable_status = await ch.enable_job_by_comment(comment=comment, bool=True)
- print(f'enabled status: {enable_status}')
+ enable_status = await ch.enable_job_by_comment(comment=comment, active=True)
+ print(f"enabled status: {enable_status}")
# disable job
print("Disable Job by Comment")
- disable_status = await ch.enable_job_by_comment(comment=comment, bool=False)
- print(f'enabled status: {disable_status}')
+ disable_status = await ch.enable_job_by_comment(comment=comment, active=False)
+ print(f"enabled status: {disable_status}")
# job status
- jobid = id_vars['ID']
+ jobid = id_vars["ID"]
status = await ch.get_job_status(jobid)
- print(f'ID: {jobid}, Job Status: {status}')
+ print(f"ID: {jobid}, Job Status: {status}")
# pretty print jobs
print("\npretty print jobs")
@@ -69,19 +79,19 @@ async def main():
await ch.edit_job("ls", "*/8 * * * *", comment=comment)
# validate cron string is valid
- cron_string = '10 * * * *'
+ cron_string = "10 * * * *"
is_valid = await ch.validate_cron_string(cron_string)
- print(f'cron string {cron_string} is valid: {is_valid}')
+ print(f"cron string {cron_string} is valid: {is_valid}")
# validate cron string is valid
- cron_string = 'hourly'
+ cron_string = "hourly"
is_valid = await ch.validate_cron_string(cron_string)
- print(f'cron string {cron_string} is valid: {is_valid}')
+ print(f"cron string {cron_string} is valid: {is_valid}")
# validate cron string is valid
- cron_string = '@reboot'
+ cron_string = "@reboot"
is_valid = await ch.validate_cron_string(cron_string)
- print(f'cron string {cron_string} is valid: {is_valid}')
+ print(f"cron string {cron_string} is valid: {is_valid}")
# set global env vars
print("set global env vars")
@@ -100,6 +110,5 @@ async def main():
await ch.clear_all_jobs()
-
if __name__ == "__main__":
asyncio.run(main())
diff --git a/tests/test_httpx.py b/tests/test_httpx.py
index 64b7940..fefa249 100644
--- a/tests/test_httpx.py
+++ b/tests/test_httpx.py
@@ -1,58 +1,69 @@
import json
+
import httpx
-http_verbs = ['get', 'post', 'put', 'delete', 'head', 'options']
+http_verbs = ["get", "post", "put", "delete", "head", "options"]
+
def call_api(method_name, url, headers, body):
# assume body is a string from the db here
# this method called from run_cron_job.py for job execution
- print(f'body: {body} , type: {type(body)}')
+ print(f"body: {body} , type: {type(body)}")
try:
body_json = None
if body is not None:
body_json = json.loads(body)
- print(f'body json: {body_json}')
+ print(f"body json: {body_json}")
if method_name.lower() in http_verbs:
method_to_call = getattr(httpx, method_name.lower())
- print(f'method_to_call: {method_to_call}')
-
- if method_name.lower() in ['get', 'delete'] and body_json is not None:
+ print(f"method_to_call: {method_to_call}")
+
+ response = None
+ if method_name.lower() in ["get", "delete"] and body_json is not None:
response = method_to_call(url, headers=headers, params=body_json)
- elif method_name.lower() in ['post', 'put']:
+ elif method_name.lower() in ["post", "put"]:
response = method_to_call(url, headers=headers, json=body_json)
+ assert response, "response is None"
+
print("response from httpx call: ")
print(response.status_code)
print(response.text)
return response
else:
- print(f'Invalid method name: {method_name}')
+ print(f"Invalid method name: {method_name}")
except json.JSONDecodeError as e:
- print(f'body json decode error: {e}')
+ print(f"body json decode error: {e}")
raise e
def get_example_test(headers):
- '''
+ """
simple GET example, get lnurlp
- '''
+ """
method_name = "GET"
url = "http://localhost:5000/lnurlp/api/v1/links"
- body = json.dumps({"out": "true"}) # if there is no body pass None.
+ body = json.dumps({"out": "true"}) # if there is no body pass None.
response = call_api(method_name, url, headers, body)
return response
def post_example_test():
- '''
+ """
simple POST example, create lnurlp
- '''
- method_name = 'POST'
+ """
+ method_name = "POST"
url = "http://localhost:5000/lnurlp/api/v1/links"
headers = {"X-Api-Key": "70a745c683034ca2b22287d8d1538dee"}
- body_dict = {"description": "testlnurlp", "amount": 1000, "max": 1000, "min": 1000, "comment_chars": 0} # , "username": "foobar3"}
+ body_dict = {
+ "description": "testlnurlp",
+ "amount": 1000,
+ "max": 1000,
+ "min": 1000,
+ "comment_chars": 0,
+ } # , "username": "foobar3"}
body = json.dumps(body_dict)
response = call_api(method_name, url, headers, body)
return response
@@ -60,12 +71,15 @@ def post_example_test():
if __name__ == "__main__":
- data_list = [{"key":"X-Api-Key","value":"70a745c683034ca2b22287d8d1538dee"},{"key":"Content-type","value":"application/json"}]
+ data_list = [
+ {"key": "X-Api-Key", "value": "70a745c683034ca2b22287d8d1538dee"},
+ {"key": "Content-type", "value": "application/json"},
+ ]
json_data = json.dumps(data_list, indent=4)
data = {}
for e in data_list:
print(f'key: {e["key"]}, value: {e["value"]}')
- data.update({e['key']: e['value']})
+ data.update({e["key"]: e["value"]})
print(json.dumps(data))
@@ -73,4 +87,4 @@ def post_example_test():
post_example_test()
- print('Continue here with method_name calling response ')
+ print("Continue here with method_name calling response ")
diff --git a/tests/test_log_handler.py b/tests/test_log_handler.py
index df140e2..c83ad95 100644
--- a/tests/test_log_handler.py
+++ b/tests/test_log_handler.py
@@ -1,21 +1,25 @@
+import asyncio
import logging
import os
-from cron_handler import CronHandler
-from utils import get_env_data_as_dict
-import asyncio
+
+from ..cron_handler import CronHandler
+from ..utils import get_env_data_as_dict
# This is a sample logging file, for Testing Purposes only
dir_path = os.path.dirname(os.path.realpath(__file__))
-filename = os.path.join(dir_path, 'logfile.log')
+filename = os.path.join(dir_path, "logfile.log")
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
file_handler = logging.FileHandler(filename)
file_handler.setLevel(logging.INFO)
-file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
+file_handler.setFormatter(
+ logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
+)
logger.addHandler(file_handler)
+
async def main():
logger.info(f"Path: {dir_path}")
@@ -24,18 +28,20 @@ async def main():
logger.warning("sample warning mesg")
try:
- vars = get_env_data_as_dict(f'{dir_path}/.env')
- logger.info(vars)
- username = vars['SCHEDULER_USER']
- jobID = os.environ.get('ID')
- print(f'jobID: {jobID}')
+ _vars = get_env_data_as_dict(f"{dir_path}/.env")
+ logger.info(_vars)
+ username = _vars["SCHEDULER_USER"]
+ job_id = os.environ.get("ID")
+ assert job_id, "Job ID not found in environment variables"
+ print(f"jobID: {job_id}")
ch = CronHandler(username)
- status = await ch.get_job_status(jobID)
- logger.info(f'ID: {jobID}, Job Status: {status}')
- print(f'ID: {jobID}, Job Status: {status}')
- except Exception as e:
+ status = await ch.get_job_status(job_id)
+ logger.info(f"ID: {job_id}, Job Status: {status}")
+ print(f"ID: {job_id}, Job Status: {status}")
+ except Exception as e:
print(e)
- logger.error(f'Error: {e}')
+ logger.error(f"Error: {e}")
+
-asyncio.run(main())
\ No newline at end of file
+asyncio.run(main())
diff --git a/tests/test_log_insert.py b/tests/test_log_insert.py
index bafc559..5e99f56 100644
--- a/tests/test_log_insert.py
+++ b/tests/test_log_insert.py
@@ -1,46 +1,48 @@
-import httpx
import asyncio
-from datetime import datetime
import logging
import logging.handlers
-import os
import sys
-sys.path.insert(0,'..')
-logfile = 'test_scheduler.log'
+import httpx
+
+sys.path.insert(0, "..")
+
+logfile = "test_scheduler.log"
-logger = logging.getLogger('test_scheduler')
+logger = logging.getLogger("test_scheduler")
logger.setLevel(logging.DEBUG)
-handler = logging.FileHandler(filename=logfile, encoding='utf-8', mode='a')
-dt_fmt = '%Y-%m-%d %H:%M:%S'
-formatter = logging.Formatter('[{asctime}] [{levelname}] {name}: {message}', dt_fmt, style='{')
+handler = logging.FileHandler(filename=logfile, encoding="utf-8", mode="a")
+dt_fmt = "%Y-%m-%d %H:%M:%S"
+formatter = logging.Formatter(
+ "[{asctime}] [{levelname}] {name}: {message}", dt_fmt, style="{"
+)
handler.setFormatter(formatter)
logger.addHandler(handler)
async def main_test() -> None:
- http_verbs = ['get', 'post', 'put', 'delete', 'head', 'options']
+ http_verbs = ["get", "post", "put", "delete", "head", "options"]
try:
- jobID = "12345test"
- method_name = 'GET' # HTTP verb determined by DB query
- url = 'https://example.com'
- headers = {'X-Custom': 'value'}
- data = {'key': 'value'}
+ job_id = "12345test"
+ method_name = "GET" # HTTP verb determined by DB query
+ url = "https://example.com"
+ headers = {"X-Custom": "value"}
+ data = {"key": "value"}
# Check if the method_name is valid for httpx
if method_name.lower() in http_verbs:
method_to_call = getattr(httpx, method_name.lower())
response = method_to_call(url, headers=headers, params=data)
if response.status_code == 200:
- logger.info(f"jobID: {jobID}, status_code: {response.status_code}")
- logger.info(f'jobID: {jobID}, response text: {response.text}')
+ logger.info(f"job_id: {job_id}, status_code: {response.status_code}")
+ logger.info(f"job_id: {job_id}, response text: {response.text}")
else:
- logger.error(f"error, saving to database for jobID: {jobID}")
+ logger.error(f"error, saving to database for job_id: {job_id}")
else:
- logger.error(f'Invalid method name: {method_name}')
+ logger.error(f"Invalid method name: {method_name}")
except Exception as e:
- logger.error(f'exception thrown: {e}')
+ logger.error(f"exception thrown: {e}")
asyncio.run(main_test())
diff --git a/utils.py b/utils.py
index ea23e15..32cc551 100644
--- a/utils.py
+++ b/utils.py
@@ -1,5 +1,9 @@
def get_env_data_as_dict(path: str) -> dict:
- with open(path, 'r') as f:
- return dict(tuple(line.replace('\n', '').split('=')) for line
- in f.readlines() if not line.startswith('#'))
-
+ with open(path) as f:
+ return dict(
+ tuple(
+ line.replace("\n", "").split("=")
+ for line in f.readlines()
+ if not line.startswith("#")
+ )
+ )
diff --git a/views.py b/views.py
index e4645fe..b26404e 100644
--- a/views.py
+++ b/views.py
@@ -1,14 +1,18 @@
-from fastapi import Depends, Request
-from starlette.responses import HTMLResponse
-
+from fastapi import APIRouter, Depends, Request
from lnbits.core.models import User
from lnbits.decorators import check_user_exists
+from lnbits.helpers import template_renderer
+from starlette.responses import HTMLResponse
+
+scheduler_generic_router = APIRouter()
+
-from . import scheduler_ext, scheduler_renderer
+def scheduler_renderer():
+ return template_renderer(["scheduler/templates"])
-@scheduler_ext.get("/", response_class=HTMLResponse)
+@scheduler_generic_router.get("/", response_class=HTMLResponse)
async def index(request: Request, user: User = Depends(check_user_exists)):
return scheduler_renderer().TemplateResponse(
- "scheduler/index.html", {"request": request, "user": user.dict()}
+ "scheduler/index.html", {"request": request, "user": user.json()}
)
diff --git a/views_api.py b/views_api.py
index d4bb266..f18137c 100644
--- a/views_api.py
+++ b/views_api.py
@@ -1,45 +1,33 @@
from http import HTTPStatus
-from typing import List
-from fastapi import Depends, Query
-from starlette.exceptions import HTTPException
-
-from lnbits.core.crud import get_user
-from lnbits.db import Filters
+from fastapi import APIRouter, Depends, HTTPException
+from lnbits.core.models import WalletTypeInfo
+from lnbits.db import Filters, Page
from lnbits.decorators import (
- WalletTypeInfo,
- get_key_type,
parse_filters,
require_admin_key,
+ require_invoice_key,
)
from lnbits.helpers import generate_filter_params_openapi
-from . import scheduler_ext
from .crud import (
+ create_log_entry,
create_scheduler_jobs,
+ delete_log_entries,
delete_scheduler_jobs,
+ get_log_entries,
get_scheduler_job,
get_scheduler_jobs,
update_scheduler_job,
- pause_scheduler,
- create_log_entry,
- get_log_entries,
- delete_log_entries,
- get_complete_log,
- delete_complete_log
)
-from .models import (
- CreateJobData,
- UpdateJobData,
- Job,
- JobDetailed,
- JobFilters,
- LogEntry
-)
-
+from .helpers import delete_complete_log, get_complete_log, pause_scheduler
+from .models import CreateJobData, Job, JobFilters, LogEntry, UpdateJobData
from .test_run_job import test_job
-@scheduler_ext.get(
+scheduler_api_router = APIRouter()
+
+
+@scheduler_api_router.get(
"/api/v1/test_log/{job_id}",
name="testlog",
summary="his log saves the testlogs",
@@ -48,15 +36,19 @@
dependencies=[Depends(require_admin_key)],
response_model=str,
)
-async def api_get_testlog(
- job_id: str,
- info: WalletTypeInfo = Depends(require_admin_key)
-) -> JobDetailed:
+async def api_get_testlog(job_id: str) -> Job:
+ job = await get_scheduler_job(job_id)
+ if not job:
+ raise HTTPException(
+ status_code=HTTPStatus.NOT_FOUND, detail="Job does not exist."
+ )
# print(f'inside api_get_test_log, job_id: {job_id}')
# print(f'inside api_get_test_log, adminkey : {info.wallet.adminkey}')
- return await test_job(job_id, info.wallet.adminkey)
+ await test_job(job_id)
+ return job
+
-@scheduler_ext.get(
+@scheduler_api_router.get(
"/api/v1/logentry/{log_id}",
status_code=HTTPStatus.OK,
name="Log entries for a specific job id from DB",
@@ -66,42 +58,34 @@ async def api_get_testlog(
response_model=str,
)
async def api_get_log_entries(log_id: str) -> str:
- info: WalletTypeInfo = Depends(require_admin_key)
return await get_log_entries(log_id)
-@scheduler_ext.post(
- "/api/v1/deletelog",
+@scheduler_api_router.delete(
+ "/api/v1/logentry/{log_id}",
name="Job Log Delete",
summary="Delete a Job's Log from DB",
description="Delete Job Log from DB",
dependencies=[Depends(require_admin_key)],
response_model=bool,
)
-async def api_job_log_delete(
- id: str,
- info: WalletTypeInfo = Depends(require_admin_key)
-) -> None:
- # print(f'inside api_job_log_delete: {id}')
- await delete_log_entries(id)
-
-@scheduler_ext.post(
+async def api_job_log_delete(log_id: str) -> None:
+ await delete_log_entries(log_id)
+
+
+@scheduler_api_router.post(
"/api/v1/logentry",
name="Log Entry Create",
summary="Create a new log entry in DB",
description="Create a new log entry in DB",
response_description="New Log Entry",
- response_model=LogEntry,
dependencies=[Depends(require_admin_key)],
)
-async def api_job_entry_create(
- data: LogEntry,
- info: WalletTypeInfo = Depends(require_admin_key)
-) -> bool:
+async def api_job_entry_create(data: LogEntry) -> LogEntry:
return await create_log_entry(data)
-@scheduler_ext.get(
+@scheduler_api_router.get(
"/api/v1/complete_log",
status_code=HTTPStatus.OK,
name="Complete Log",
@@ -111,11 +95,10 @@ async def api_job_entry_create(
response_model=str,
)
async def api_get_complete_log() -> str:
- info: WalletTypeInfo = Depends(require_admin_key)
return await get_complete_log()
-@scheduler_ext.post(
+@scheduler_api_router.post(
"/api/v1/delete_log",
status_code=HTTPStatus.OK,
name="delete Log",
@@ -125,25 +108,22 @@ async def api_get_complete_log() -> str:
response_model=bool,
)
async def api_delete_complete_log() -> bool:
- info: WalletTypeInfo = Depends(require_admin_key)
return await delete_complete_log()
-@scheduler_ext.get(
+@scheduler_api_router.get(
"/api/v1/jobs",
status_code=HTTPStatus.OK,
name="Jobs List",
summary="get list of jobs",
response_description="list of jobs",
- response_model=List[Job],
dependencies=[Depends(require_admin_key)],
openapi_extra=generate_filter_params_openapi(JobFilters),
)
async def api_scheduler_jobs(
- # trunk-ignore(ruff/B008)
- wallet: WalletTypeInfo = Depends(require_admin_key),
- filters: Filters[JobFilters] = Depends(parse_filters(JobFilters))
-) -> List[Job]:
+ key_info: WalletTypeInfo = Depends(require_admin_key),
+ filters: Filters = Depends(parse_filters(JobFilters)),
+) -> Page[Job]:
"""
Retrieves all jobs, supporting flexible filtering (LHS Brackets).
@@ -158,25 +138,25 @@ async def api_scheduler_jobs(
Filters are AND-combined
"""
- return await get_scheduler_jobs(wallet.wallet.adminkey, filters)
+ return await get_scheduler_jobs(key_info.wallet.adminkey, filters)
+
-@scheduler_ext.get(
+@scheduler_api_router.get(
"/api/v1/jobs/{job_id}",
name="Jobs Get",
summary="Get a specific jobs",
description="get jobs",
response_description="job if job exists",
- dependencies=[Depends(get_key_type)],
- response_model=JobDetailed
+ dependencies=[Depends(require_invoice_key)],
)
-async def api_scheduler_user(job_id: str) -> JobDetailed:
- Job = await get_scheduler_job(job_id)
- if not Job:
- raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail='Jobs not found')
- return Job
+async def api_scheduler_user(job_id: str) -> Job:
+ job = await get_scheduler_job(job_id)
+ if not job:
+        raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail="Job not found")
+ return job
-@scheduler_ext.post(
+@scheduler_api_router.post(
"/api/v1/jobs",
name="Job Create",
summary="Create a new job",
@@ -186,30 +166,33 @@ async def api_scheduler_user(job_id: str) -> JobDetailed:
response_model=Job,
)
async def api_scheduler_jobs_create(
- data: CreateJobData,
- info: WalletTypeInfo = Depends(require_admin_key)
+ data: CreateJobData, info: WalletTypeInfo = Depends(require_admin_key)
) -> Job:
- return await create_scheduler_jobs(info.wallet.adminkey, data)
+ return await create_scheduler_jobs(info.wallet.adminkey, data)
-@scheduler_ext.put(
+@scheduler_api_router.put(
"/api/v1/jobs/{job_id}",
name="Jobs Update",
summary="Update a jobs",
description="Update a jobs",
response_description="Updated jobs",
dependencies=[Depends(require_admin_key)],
- response_model=JobDetailed,
)
-async def api_scheduler_jobs_update(
- job_id: str,
- data: UpdateJobData,
- info: WalletTypeInfo = Depends(require_admin_key)
-) -> JobDetailed:
- return await update_scheduler_job(job_id, info.wallet.adminkey, data)
+async def api_scheduler_jobs_update(job_id: str, data: UpdateJobData) -> Job:
+ job = await get_scheduler_job(job_id)
+ if not job:
+ raise HTTPException(
+            status_code=HTTPStatus.NOT_FOUND, detail="Job does not exist."
+ )
+ for key, value in data.dict().items():
+ setattr(job, key, value)
-@scheduler_ext.delete(
+ return await update_scheduler_job(job)
+
+
+@scheduler_api_router.delete(
"/api/v1/jobs/{jobs_id}",
name="Jobs Delete",
summary="Delete a jobs",
@@ -218,9 +201,7 @@ async def api_scheduler_jobs_update(
responses={404: {"description": "Jobs does not exist."}},
status_code=HTTPStatus.OK,
)
-async def api_scheduler_jobs_delete(
- jobs_id
-) -> None:
+async def api_scheduler_jobs_delete(jobs_id) -> None:
jobs = await get_scheduler_job(jobs_id)
if not jobs:
raise HTTPException(
@@ -231,7 +212,7 @@ async def api_scheduler_jobs_delete(
await delete_log_entries(jobs_id)
-@scheduler_ext.post(
+@scheduler_api_router.post(
"/api/v1/pause/{job_id}/{status}",
name="Pause Jobs",
summary="Start or Stop Cron jobs",
@@ -241,9 +222,18 @@ async def api_scheduler_jobs_delete(
responses={404: {"description": "Job does not exist."}},
status_code=HTTPStatus.OK,
)
-async def api_scheduler_pause(
- job_id, status
-) -> JobDetailed:
- return await pause_scheduler(job_id, status)
-
+async def api_scheduler_pause(job_id: str, status: str) -> Job:
+ # TODO: status is not used
+ _ = status
+ job = await get_scheduler_job(job_id)
+ if not job:
+ raise HTTPException(
+ status_code=HTTPStatus.NOT_FOUND, detail="Job does not exist."
+ )
+ pause = await pause_scheduler(job_id)
+ if not pause:
+ raise HTTPException(
+ status_code=HTTPStatus.INTERNAL_SERVER_ERROR, detail="Error in pausing job."
+ )
+ return job