Dev RELEASE: v0.15.4 #61
Merged · 12 commits · Mar 2, 2024
1 change: 1 addition & 0 deletions .github/CODEOWNERS
@@ -0,0 +1 @@
* @dmdhrumilmistry
20 changes: 17 additions & 3 deletions .pre-commit-config.yaml
@@ -1,7 +1,21 @@
repos:
- repo: https://github.com/ambv/black
rev: 24.1.1
rev: 23.3.0
hooks:
- id: black
language_version: python3.11
args: ["--line-length", "100", "--skip-string-normalization"]
args: ["--skip-string-normalization"]
language_version: python3
- repo: https://github.com/pycqa/flake8
rev: 7.0.0
hooks:
- id: flake8
args: ["--max-line-length=100", "--extend-ignore=E203", "--exit-zero"]
verbose: true
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: no-commit-to-branch
args: ["--branch", "main", "--branch", "release"]
- id: check-added-large-files
- id: trailing-whitespace
- id: double-quote-string-fixer
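
A note on the black arguments: --skip-string-normalization keeps black from rewriting quotes, which lets the double-quote-string-fixer hook own the quote style. A tiny illustration of the combined effect (illustrative strings, not project code), assuming the hooks behave as documented:

# double-quote-string-fixer rewrites plain double-quoted literals to single
# quotes; --skip-string-normalization stops black from converting them back.
greeting = 'hello offat'          # stays single-quoted through both hooks
note = "a string with 'quotes'"   # typically left alone to avoid escaping
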
5 changes: 3 additions & 2 deletions src/offat/http.py
@@ -1,7 +1,7 @@
from os import name as os_name
from aiohttp import ClientSession, ClientTimeout
from aiolimiter import AsyncLimiter
from os import name as os_name

from tenacity import retry, stop_after_attempt, retry_if_not_exception_type

import asyncio
import aiohttp.resolver
@@ -35,6 +35,7 @@ def __init__(self, rate_limit: float = 50, headers: dict | None = None, proxy: s
self._limiter = AsyncLimiter(max_rate=rate_limit, time_period=1)
self._timeout = ClientTimeout(total=timeout)

@retry(stop=stop_after_attempt(3), retry=retry_if_not_exception_type(KeyboardInterrupt or asyncio.exceptions.CancelledError))
async def request(self, url: str, method: str = 'GET', *args, **kwargs) -> dict:
'''Send HTTP requests asynchronously

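
As a side note on the retry decorator above: `KeyboardInterrupt or asyncio.exceptions.CancelledError` evaluates to just `KeyboardInterrupt`, while tenacity's retry_if_not_exception_type also accepts a tuple of exception types. A minimal standalone sketch covering both exceptions (hypothetical fetch_status helper, not OFFAT's request method):

import asyncio
from aiohttp import ClientSession
from tenacity import retry, stop_after_attempt, retry_if_not_exception_type

@retry(
    stop=stop_after_attempt(3),
    retry=retry_if_not_exception_type(
        (KeyboardInterrupt, asyncio.exceptions.CancelledError)
    ),
)
async def fetch_status(url: str) -> int:
    # retried up to 3 times unless the failure is an interrupt or cancellation
    async with ClientSession() as session:
        async with session.get(url) as resp:
            return resp.status

# asyncio.run(fetch_status('https://example.com'))
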
14 changes: 9 additions & 5 deletions src/offat/parsers/__init__.py
@@ -1,24 +1,28 @@
from sys import exit
from requests import get as http_get
from json import loads as json_load, JSONDecodeError
from .openapi import OpenAPIv3Parser
from .swagger import SwaggerParser
from .parser import BaseParser
from ..utils import is_valid_url
from ..logger import logger


def create_parser(fpath_or_url: str, spec: dict = None) -> SwaggerParser | OpenAPIv3Parser:
'''returns parser based on doc file'''
if fpath_or_url and is_valid_url(fpath_or_url):
res = http_get(fpath_or_url)
if res.status_code != 200:
raise ValueError(
f"server returned status code {res.status_code} offat expects 200 status code"
)
logger.error(
"server returned status code %d offat expects 200 status code", res.status_code)
exit(-1)

try:
spec = json_load(res.text)
fpath_or_url = None
except JSONDecodeError as e:
raise ValueError("Invalid json data spec file url")
except JSONDecodeError:
logger.error("Invalid json data spec file url")
exit(-1)

parser = BaseParser(file_or_url=fpath_or_url, spec=spec)
if parser.is_v3:
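
The change above swaps raised ValueErrors for logged errors plus an exit code. A standalone sketch of that pattern with a stdlib logger (hypothetical load_remote_spec helper, not the project's create_parser):

import logging
from sys import exit
from json import loads as json_load, JSONDecodeError
from requests import get as http_get

logger = logging.getLogger('offat')

def load_remote_spec(url: str) -> dict:
    res = http_get(url, timeout=10)
    if res.status_code != 200:
        # %-style arguments are formatted lazily by the logging module
        logger.error('server returned status code %d, expected 200', res.status_code)
        exit(-1)
    try:
        return json_load(res.text)
    except JSONDecodeError:
        logger.error('Invalid json data at spec file url')
        exit(-1)
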
21 changes: 10 additions & 11 deletions src/offat/tester/test_runner.py
@@ -1,7 +1,7 @@
from asyncio import ensure_future, gather
from asyncio.exceptions import CancelledError
from enum import Enum
from sys import exc_info
from traceback import print_exc
from sys import exc_info, exit
from rich.progress import Progress, TaskID


@@ -70,7 +70,6 @@ def _generate_payloads(self, params: list[dict], payload_for: PayloadFor = Paylo
async def send_request(self, test_task):
url = test_task.get('url')
http_method = test_task.get('method')
success_codes = test_task.get('success_codes', [200, 301])
args = test_task.get('args')
kwargs = test_task.get('kwargs')
body_params = test_task.get('body_params')
@@ -127,17 +126,17 @@ async def run_tests(self, test_tasks: list, description: str | None):
tasks = []

for test_task in test_tasks:
tasks.append(
ensure_future(
self.send_request(test_task)
)
)
tasks.append(ensure_future(self.send_request(test_task)))

try:
results = await gather(*tasks)
return results

except (KeyboardInterrupt, CancelledError,):
logger.error("[!] User Interruption Detected!")
exit(-1)

except Exception as e:
console.print(
f'[*] Exception occurred while gathering results: {e}')
print_exc()
logger.error("[*] Exception occurred while gathering results: %s",
e, exc_info=exc_info())
return []
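
For reference, a minimal standalone version of the gather-and-interrupt pattern used above (dummy send_request coroutine, not the TestRunner class):

import asyncio
from asyncio import ensure_future, gather
from asyncio.exceptions import CancelledError

async def send_request(task_id: int) -> dict:
    await asyncio.sleep(0.1)  # stand-in for the real HTTP request
    return {'task': task_id, 'status': 200}

async def run_all(count: int) -> list:
    tasks = [ensure_future(send_request(i)) for i in range(count)]
    try:
        return await gather(*tasks)
    except (KeyboardInterrupt, CancelledError):
        print('[!] User Interruption Detected!')
        return []

results = asyncio.run(run_all(5))
print(len(results))  # 5
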
24 changes: 19 additions & 5 deletions src/offat/tester/tester_utils.py
@@ -1,6 +1,11 @@
from asyncio import run
"""
OWASP OFFAT Tester Utils Module
"""
from http import client as http_client
from typing import Optional
from sys import exc_info, exit
from asyncio import run
from asyncio.exceptions import CancelledError
from re import search as regex_search

from .post_test_processor import PostRunTests
@@ -63,10 +68,19 @@ def run_test(test_runner: TestRunner, tests: list[dict], regex_pattern: Optional
)
)

if skip_test_run:
test_results = tests
else:
test_results = run(test_runner.run_tests(tests, description))
try:
if skip_test_run:
test_results = tests
else:
test_results = run(test_runner.run_tests(tests, description))

except (KeyboardInterrupt, CancelledError,):
logger.error("[!] User Interruption Detected!")
exit(-1)

except Exception as e:
logger.error("[*] Exception occurred while running tests: %s", e, exc_info=exc_info())
return []

if post_run_matcher_test:
test_results = PostRunTests.matcher(test_results)
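
One note on the logging call in the new except block: passing exc_info=exc_info() attaches the active traceback to the log record, so the traceback is still logged without a separate print_exc() call. A tiny standalone illustration (hypothetical example, not project code):

import logging
from sys import exc_info

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger('offat')

try:
    1 / 0
except Exception as e:
    # the (type, value, traceback) tuple from exc_info() is rendered
    # as a full traceback below the log message
    logger.error('[*] Exception occurred while running tests: %s', e, exc_info=exc_info())
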
13 changes: 8 additions & 5 deletions src/offat/utils.py
@@ -1,7 +1,10 @@
"""
utils module
"""
from json import loads as json_load, dumps as json_dumps, JSONDecodeError
from pkg_resources import get_distribution
from os.path import isfile
from re import compile, match
from re import compile as re_compile, match
from pkg_resources import get_distribution
from yaml import safe_load, YAMLError
from .logger import logger

@@ -34,7 +37,7 @@ def read_yaml(file_path: str) -> dict:
if not isfile(file_path):
return {"error": "File Not Found"}

with open(file_path) as f:
with open(file_path, "r", encoding="utf-8") as f:
try:
return safe_load(f.read())
except YAMLError:
@@ -54,7 +57,7 @@ def read_json(file_path: str) -> dict:
if not isfile(file_path):
return {"error": "File Not Found"}

with open(file_path) as f:
with open(file_path, "r", encoding="utf-8") as f:
try:
return json_load(f.read())
except JSONDecodeError:
@@ -185,7 +188,7 @@ def is_valid_url(url: str) -> bool:
Raises:
Any exception occurred during operation
'''
url_regex = compile(
url_regex = re_compile(
r'https?://(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)'
)
return bool(match(url_regex, url))
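
The open() calls above now pin the text mode and encoding explicitly. A standalone sketch of the same pattern (hypothetical read_yaml variant, not the module's exact code):

from os.path import isfile
from yaml import safe_load, YAMLError

def read_yaml(file_path: str) -> dict:
    if not isfile(file_path):
        return {'error': 'File Not Found'}
    # explicit "r" + encoding="utf-8" keeps reads independent of the OS locale
    with open(file_path, 'r', encoding='utf-8') as f:
        try:
            return safe_load(f.read())
        except YAMLError:
            return {'error': 'Invalid YAML data'}
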