diff --git a/app_builder_import_file/sigsci_TA_for_splunk-1_0_38_export.tgz b/app_builder_import_file/sigsci_TA_for_splunk-1_0_38_export.tgz new file mode 100644 index 0000000..5190f29 Binary files /dev/null and b/app_builder_import_file/sigsci_TA_for_splunk-1_0_38_export.tgz differ diff --git a/sigsci_TA_for_splunk-1.0.37.tgz b/sigsci_TA_for_splunk-1.0.37.tgz deleted file mode 100644 index 1b89f2f..0000000 Binary files a/sigsci_TA_for_splunk-1.0.37.tgz and /dev/null differ diff --git a/sigsci_TA_for_splunk-1.0.38.tgz b/sigsci_TA_for_splunk-1.0.38.tgz new file mode 100644 index 0000000..5190f29 Binary files /dev/null and b/sigsci_TA_for_splunk-1.0.38.tgz differ diff --git a/sigsci_TA_for_splunk/README.txt b/sigsci_TA_for_splunk/README.txt index 1a9bf83..b6c9de3 100644 --- a/sigsci_TA_for_splunk/README.txt +++ b/sigsci_TA_for_splunk/README.txt @@ -9,4 +9,12 @@ This is an add-on powered by the Splunk Add-on Builder. /opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/setuptools/gui.exe: this file does not require any source code /opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/setuptools/cli-arm64.exe: this file does not require any source code /opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/setuptools/gui-arm64.exe: this file does not require any source code +# Binary File Declaration +/opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/pvectorc.cpython-37m-x86_64-linux-gnu.so: this file does not require any source code +/opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/setuptools/cli-32.exe: this file does not require any source code +/opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/setuptools/gui-32.exe: this file does not require any source code +/opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/setuptools/cli-64.exe: this file does not require any source code +/opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/setuptools/gui-64.exe: this file does not require any source code +/opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/setuptools/cli.exe: this file does not require any source code +/opt/splunk/var/data/tabuilder/package/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/markupsafe/_speedups.cpython-37m-x86_64-linux-gnu.so: this file does not require any source code
\ No newline at end of file diff --git a/sigsci_TA_for_splunk/README/inputs.conf.spec b/sigsci_TA_for_splunk/README/inputs.conf.spec index 24169b9..ae675ea 100644 --- a/sigsci_TA_for_splunk/README/inputs.conf.spec +++ b/sigsci_TA_for_splunk/README/inputs.conf.spec @@ -1,8 +1,21 @@ [SigsciEvent://] site_api_name = This is the Site API Name. It should not be a URL. +disable_catchup = Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True. +twenty_hour_catchup = In the event the last stored time is >24 hours, the TA will try to catch up from exactly 24 hours ago; otherwise it resets to now minus the delta. 'Disable Catchup' must be False in order to work. +request_timeout = Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large. +read_timeout = Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large. + +[SigsciActivity://] +disable_catchup = Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True. +twenty_hour_catchup = In the event the last stored time is >24 hours, the TA will try to catch up from exactly 24 hours ago; otherwise it resets to now minus the delta. 'Disable Catchup' must be False in order to work. +request_timeout = Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large. +read_timeout = Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large. [SigsciRequests://] site_api_name = This is the API Name of the site to pull request data from. This should not be a URL. - -[SigsciActivity://] -place_holder = It was required to have one option even if it isn't needed. You can skip this one. \ No newline at end of file +request_limit = The number of request objects returned in the array. Default: 100. Max: 1000. +disable_catchup = Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True. +twenty_hour_catchup = In the event the last stored time is >24 hours, the TA will try to catch up from exactly 24 hours ago; otherwise it resets to now minus the delta. 'Disable Catchup' must be False in order to work. +attack_and_anomaly_signals_only = Only retrieves requests that contain attack or anomaly signals. Please evaluate your signal configuration if there are overly inclusive signals creating excessive requests. +request_timeout = Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large. +read_timeout = Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large. \ No newline at end of file
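To make the catch-up options above concrete, here is a minimal sketch of how they interact, mirroring the get_until_time() change in sigsci_helper.py further down in this diff. The function name and the simplified return values are illustrative only, and timestamps are assumed to be epoch seconds:

SECONDS_IN_DAY = 24 * 60 * 60

def resolve_from_time(now, last_checkpoint, delta, disable_catchup, twenty_hour_catchup):
    # Sketch of the TA's catch-up decision; not part of the shipped code.
    gap = now - last_checkpoint
    if disable_catchup and gap > delta + delta:
        return now - delta               # stale checkpoint: restart from now minus the delta
    if gap > SECONDS_IN_DAY:             # the feed cannot serve data older than 24 hours
        if twenty_hour_catchup:
            return now - SECONDS_IN_DAY  # catch up from exactly 24 hours ago
        return now - delta               # otherwise reset from the delta
    return last_checkpoint               # normal case: resume from the checkpoint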
diff --git a/sigsci_TA_for_splunk/app.manifest b/sigsci_TA_for_splunk/app.manifest index ab5d89a..5d78a04 100644 --- a/sigsci_TA_for_splunk/app.manifest +++ b/sigsci_TA_for_splunk/app.manifest @@ -5,7 +5,7 @@ "id": { "group": null, "name": "sigsci_TA_for_splunk", - "version": "1.0.37" + "version": "1.0.38" }, "author": [ { diff --git a/sigsci_TA_for_splunk/appserver/static/js/build/globalConfig.json b/sigsci_TA_for_splunk/appserver/static/js/build/globalConfig.json index ef3a890..3199a1b 100644 --- a/sigsci_TA_for_splunk/appserver/static/js/build/globalConfig.json +++ b/sigsci_TA_for_splunk/appserver/static/js/build/globalConfig.json @@ -248,8 +248,28 @@ "label": "Site API Name" }, { - "field": "place_holder", - "label": "Place Holder" + "field": "disable_catchup", + "label": "Disable Catchup" + }, + { + "field": "twenty_hour_catchup", + "label": "24 Hour Catchup" + }, + { + "field": "request_timeout", + "label": "Request Timeout" + }, + { + "field": "read_timeout", + "label": "Read Timeout" + }, + { + "field": "request_limit", + "label": "Request Limit" + }, + { + "field": "attack_and_anomaly_signals_only", + "label": "Attack & Anomaly Signals Only" } ], "actions": [ @@ -332,12 +352,65 @@ "errorMsg": "Max length of text input is 8192" } ] + }, + { + "field": "disable_catchup", + "label": "Disable Catchup", + "help": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", + "required": false, + "type": "checkbox", + "defaultValue": true + }, + { + "field": "twenty_hour_catchup", + "label": "24 Hour Catchup", + "help": "In the event the last stored time is >24 hours, the TA will try to catch up from exactly 24 hours ago; otherwise it resets to now minus the delta. 'Disable Catchup' must be False in order to work.", + "required": false, + "type": "checkbox" + }, + { + "field": "request_timeout", + "label": "Request Timeout", + "help": "Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + "required": true, + "type": "text", + "defaultValue": "60", + "validators": [ + { + "type": "string", + "minLength": 0, + "maxLength": 8192, + "errorMsg": "Max length of text input is 8192" + } + ], + "options": { + "placeholder": "60" + } + }, + { + "field": "read_timeout", + "label": "Read Timeout", + "help": "Configures Read Timeout for HTTP operations.
Consider increasing if on a slow connection or pagination batches are large.", + "required": true, + "type": "text", + "defaultValue": "60", + "validators": [ + { + "type": "string", + "minLength": 0, + "maxLength": 8192, + "errorMsg": "Max length of text input is 8192" + } + ], + "options": { + "placeholder": "60" + } } ] }, { - "name": "SigsciRequests", - "title": "SigsciRequests", + "name": "SigsciActivity", + "title": "SigsciActivity", "entity": [ { "field": "name", @@ -394,11 +467,27 @@ ] }, { - "field": "site_api_name", - "label": "Site API Name", - "help": "This is the API Name of the site to pull request data from. This should not be a URL.", + "field": "disable_catchup", + "label": "Disable Catchup", + "help": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", + "required": false, + "type": "checkbox", + "defaultValue": true + }, + { + "field": "twenty_hour_catchup", + "label": "24 Hour Catchup", + "help": "In the event the last stored time is >24 hours, the TA will try to catch up from exactly 24 hours ago; otherwise it resets to now minus the delta. 'Disable Catchup' must be False in order to work.", + "required": false, + "type": "checkbox" + }, + { + "field": "request_timeout", + "label": "Request Timeout", + "help": "Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required": true, "type": "text", + "defaultValue": "60", "validators": [ { "type": "string", @@ -406,13 +495,35 @@ "maxLength": 8192, "errorMsg": "Max length of text input is 8192" } - ] + ], + "options": { + "placeholder": "60" + } + }, + { + "field": "read_timeout", + "label": "Read Timeout", + "help": "Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + "required": true, + "type": "text", + "defaultValue": "60", + "validators": [ + { + "type": "string", + "minLength": 0, + "maxLength": 8192, + "errorMsg": "Max length of text input is 8192" + } + ], + "options": { + "placeholder": "60" + } } ] }, { - "name": "SigsciActivity", - "title": "SigsciActivity", + "name": "SigsciRequests", + "title": "SigsciRequests", "entity": [ { "field": "name", @@ -469,11 +580,65 @@ ] }, { - "field": "place_holder", - "label": "Place Holder", - "help": "It was required to have one option even if it isn't needed. You can skip this one.", + "field": "site_api_name", + "label": "Site API Name", + "help": "This is the API Name of the site to pull request data from. This should not be a URL.", + "required": true, + "type": "text", + "validators": [ + { + "type": "string", + "minLength": 0, + "maxLength": 8192, + "errorMsg": "Max length of text input is 8192" + } + ] + }, + { + "field": "request_limit", + "label": "Request Limit", + "help": "The number of request objects returned in the array. Default: 100. Max: 1000.", + "required": true, + "type": "text", + "defaultValue": "1000", + "validators": [ + { + "type": "string", + "minLength": 0, + "maxLength": 8192, + "errorMsg": "Max length of text input is 8192" + } + ] + }, + { + "field": "disable_catchup", + "label": "Disable Catchup", + "help": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true.
Default: True.", + "required": false, + "type": "checkbox", + "defaultValue": true + }, + { + "field": "twenty_hour_catchup", + "label": "24 Hour Catchup", + "help": "In the event the last time stored is >24hours the TA will try can try and catch-up from exactly 24 hours ago, otherwise resets to now minus the delta. 'Disable Catchup' must be False in order to work.", + "required": false, + "type": "checkbox" + }, + { + "field": "attack_and_anomaly_signals_only", + "label": "Attack & Anomaly Signals Only", + "help": "Only retrieves requests that contain attack or anomaly signals. Please evaluate your signal configuration if there are overly inclusive signals creating excessive requests.", "required": false, + "type": "checkbox" + }, + { + "field": "request_timeout", + "label": "Request Timeout", + "help": "Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + "required": true, "type": "text", + "defaultValue": "60", "validators": [ { "type": "string", @@ -483,8 +648,24 @@ } ], "options": { - "placeholder": "Not needed" + "placeholder": "Request Timeout" } + }, + { + "field": "read_timeout", + "label": "Read Timeout", + "help": "Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + "required": true, + "type": "text", + "defaultValue": "60", + "validators": [ + { + "type": "string", + "minLength": 0, + "maxLength": 8192, + "errorMsg": "Max length of text input is 8192" + } + ] } ] } diff --git a/sigsci_TA_for_splunk/bin/SigsciActivity.py b/sigsci_TA_for_splunk/bin/SigsciActivity.py index c7ebb7d..bf0464b 100644 --- a/sigsci_TA_for_splunk/bin/SigsciActivity.py +++ b/sigsci_TA_for_splunk/bin/SigsciActivity.py @@ -46,10 +46,22 @@ def get_scheme(self): For customized inputs, hard code the arguments here to hide argument detail from users. For other input types, arguments should be get from input_module. Defining new input types could be easier. """ - scheme.add_argument(smi.Argument("place_holder", title="Place Holder", - description="It was required to have one option even if it isn\'t needed. You can skip this one.", + scheme.add_argument(smi.Argument("disable_catchup", title="Disable Catchup", + description="Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", required_on_create=False, required_on_edit=False)) + scheme.add_argument(smi.Argument("twenty_hour_catchup", title="24 Hour Catchup", + description="In the event the last time stored is >24Hours the TA will try and catch-up from exactly 24 hours ago, otherwise resets to now minus the delta. \'Disable Catchup\' must be false in order to work.", + required_on_create=False, + required_on_edit=False)) + scheme.add_argument(smi.Argument("request_timeout", title="Request Timeout", + description="Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + required_on_create=True, + required_on_edit=False)) + scheme.add_argument(smi.Argument("read_timeout", title="Read Timeout", + description="Configures Read Timeout for HTTP operations. 
diff --git a/sigsci_TA_for_splunk/bin/SigsciActivity.py b/sigsci_TA_for_splunk/bin/SigsciActivity.py index c7ebb7d..bf0464b 100644 --- a/sigsci_TA_for_splunk/bin/SigsciActivity.py +++ b/sigsci_TA_for_splunk/bin/SigsciActivity.py @@ -46,10 +46,22 @@ def get_scheme(self): For customized inputs, hard code the arguments here to hide argument detail from users. For other input types, arguments should be get from input_module. Defining new input types could be easier. """ - scheme.add_argument(smi.Argument("place_holder", title="Place Holder", - description="It was required to have one option even if it isn\'t needed. You can skip this one.", + scheme.add_argument(smi.Argument("disable_catchup", title="Disable Catchup", + description="Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", required_on_create=False, required_on_edit=False)) + scheme.add_argument(smi.Argument("twenty_hour_catchup", title="24 Hour Catchup", + description="In the event the last stored time is >24 hours, the TA will try to catch up from exactly 24 hours ago; otherwise it resets to now minus the delta. \'Disable Catchup\' must be False in order to work.", + required_on_create=False, + required_on_edit=False)) + scheme.add_argument(smi.Argument("request_timeout", title="Request Timeout", + description="Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + required_on_create=True, + required_on_edit=False)) + scheme.add_argument(smi.Argument("read_timeout", title="Read Timeout", + description="Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + required_on_create=True, + required_on_edit=False)) return scheme def get_app_name(self): @@ -69,6 +81,8 @@ def get_account_fields(self): def get_checkbox_fields(self): checkbox_fields = [] + checkbox_fields.append("disable_catchup") + checkbox_fields.append("twenty_hour_catchup") return checkbox_fields def get_global_checkbox_fields(self): diff --git a/sigsci_TA_for_splunk/bin/SigsciEvent.py b/sigsci_TA_for_splunk/bin/SigsciEvent.py index 8559e5d..abfa021 100644 --- a/sigsci_TA_for_splunk/bin/SigsciEvent.py +++ b/sigsci_TA_for_splunk/bin/SigsciEvent.py @@ -50,6 +50,22 @@ def get_scheme(self): description="This is the Site API Name. It should not be a URL.", required_on_create=True, required_on_edit=False)) + scheme.add_argument(smi.Argument("disable_catchup", title="Disable Catchup", + description="Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", + required_on_create=False, + required_on_edit=False)) + scheme.add_argument(smi.Argument("twenty_hour_catchup", title="24 Hour Catchup", + description="In the event the last stored time is >24 hours, the TA will try to catch up from exactly 24 hours ago; otherwise it resets to now minus the delta. \'Disable Catchup\' must be False in order to work.", + required_on_create=False, + required_on_edit=False)) + scheme.add_argument(smi.Argument("request_timeout", title="Request Timeout", + description="Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + required_on_create=True, + required_on_edit=False)) + scheme.add_argument(smi.Argument("read_timeout", title="Read Timeout", + description="Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + required_on_create=True, + required_on_edit=False)) return scheme def get_app_name(self): @@ -69,6 +85,8 @@ def get_account_fields(self): def get_checkbox_fields(self): checkbox_fields = [] + checkbox_fields.append("disable_catchup") + checkbox_fields.append("twenty_hour_catchup") return checkbox_fields def get_global_checkbox_fields(self): diff --git a/sigsci_TA_for_splunk/bin/SigsciRequests.py b/sigsci_TA_for_splunk/bin/SigsciRequests.py index 638eb83..05dc101 100644 --- a/sigsci_TA_for_splunk/bin/SigsciRequests.py +++ b/sigsci_TA_for_splunk/bin/SigsciRequests.py @@ -50,6 +50,30 @@ def get_scheme(self): description="This is the API Name of the site to pull request data from. This should not be a URL.", required_on_create=True, required_on_edit=False)) + scheme.add_argument(smi.Argument("request_limit", title="Request Limit", + description="The number of request objects returned in the array. Default: 100. Max: 1000.", + required_on_create=True, + required_on_edit=False)) + scheme.add_argument(smi.Argument("disable_catchup", title="Disable Catchup", + description="Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", + required_on_create=False, + required_on_edit=False)) + scheme.add_argument(smi.Argument("twenty_hour_catchup", title="24 Hour Catchup", + description="In the event the last stored time is >24 hours, the TA will try to catch up from exactly 24 hours ago; otherwise it resets to now minus the delta.
\'Disable Catchup\' must be False in order to work.", + required_on_create=False, + required_on_edit=False)) + scheme.add_argument(smi.Argument("attack_and_anomaly_signals_only", title="Attack & Anomaly Signals Only", + description="Only retrieves requests that contain attack or anomaly signals. Please evaluate your signal configuration if there are overly inclusive signals creating excessive requests.", + required_on_create=False, + required_on_edit=False)) + scheme.add_argument(smi.Argument("request_timeout", title="Request Timeout", + description="Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + required_on_create=True, + required_on_edit=False)) + scheme.add_argument(smi.Argument("read_timeout", title="Read Timeout", + description="Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", + required_on_create=True, + required_on_edit=False)) return scheme def get_app_name(self): @@ -69,6 +93,9 @@ def get_account_fields(self): def get_checkbox_fields(self): checkbox_fields = [] + checkbox_fields.append("disable_catchup") + checkbox_fields.append("twenty_hour_catchup") + checkbox_fields.append("attack_and_anomaly_signals_only") return checkbox_fields def get_global_checkbox_fields(self): diff --git a/sigsci_TA_for_splunk/bin/input_module_SigsciActivity.py b/sigsci_TA_for_splunk/bin/input_module_SigsciActivity.py index 45093e3..992baca 100644 --- a/sigsci_TA_for_splunk/bin/input_module_SigsciActivity.py +++ b/sigsci_TA_for_splunk/bin/input_module_SigsciActivity.py @@ -3,7 +3,7 @@ import json import time from datetime import datetime -from sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time +from sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time, validate_timeouts """ IMPORTANT @@ -18,7 +18,15 @@ def validate_input(helper, definition): - # This example accesses the modular input variable + request_timeout = definition.parameters.get("request_timeout", None) + read_timeout = definition.parameters.get("read_timeout", None) + validate_timeouts(request_timeout, read_timeout) + + # Catchup Opts + twenty_hour_catchup = definition.parameters.get('twenty_hour_catchup', None) + disable_catchup = definition.parameters.get('disable_catchup', None) + if twenty_hour_catchup and disable_catchup is True: + raise ValueError("Catch up values are mutually exclusive") pass @@ -34,6 +42,18 @@ def collect_events(helper, ew): api_host = "https://dashboard.signalsciences.net" helper.log_info("email: %s" % global_email) helper.log_info("corp: %s" % global_corp_api_name) + + # Request / Read Timeouts + request_timeout = float(helper.get_arg("request_timeout")) + read_timeout = float(helper.get_arg('read_timeout')) + helper.log_info(f"request configuration is: request:{request_timeout}, read: {read_timeout}") + + # CatchUp Config Declaration + twenty_hour_catchup = helper.get_arg('twenty_hour_catchup') + helper.log_info(f"twenty four hour catchup is: {twenty_hour_catchup}") + + disable_catchup = helper.get_arg('disable_catchup') + helper.log_info(f"disable catchup is: {disable_catchup}") def pull_events(delta, key=None): last_run_until = helper.get_check_point("activity_last_until_time") if last_run_until is None: (until_time, from_time) = get_from_and_until_times( ) else: (until_time, from_time) = get_until_time( - helper, last_run_until, delta, five_min_offset=False + helper, last_run_until, delta,
twenty_hour_catchup=twenty_hour_catchup, catchup_disabled=disable_catchup, five_min_offset=False ) if from_time is None: helper.log_info(f"{last_run_until} >= current now time, skipping run") @@ -88,6 +108,8 @@ def pull_events(delta, key=None): global_email=global_email, global_corp_api_name=global_corp_api_name, current_site="", + request_timeout=request_timeout, + read_timeout=read_timeout, ) config.headers = { "Content-type": "application/json", diff --git a/sigsci_TA_for_splunk/bin/input_module_SigsciEvent.py b/sigsci_TA_for_splunk/bin/input_module_SigsciEvent.py index 34e9097..df85116 100644 --- a/sigsci_TA_for_splunk/bin/input_module_SigsciEvent.py +++ b/sigsci_TA_for_splunk/bin/input_module_SigsciEvent.py @@ -4,7 +4,7 @@ import json import time from datetime import datetime -from sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time +from sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time, validate_timeouts """ IMPORTANT @@ -19,7 +19,10 @@ def validate_input(helper, definition): - # This example accesses the modular input variable + request_timeout = definition.parameters.get("request_timeout", None) + read_timeout = definition.parameters.get("read_timeout", None) + validate_timeouts(request_timeout, read_timeout) + site_name = definition.parameters.get("site_api_name", None) if site_name is None or site_name == "": msg = "The site_name can not be empty" @@ -37,6 +40,13 @@ def validate_input(helper, definition): "My Site Name", ) raise ValueError("InvalidSiteName", msg) + + # Catchup Opts + twenty_hour_catchup = definition.parameters.get('twenty_hour_catchup', None) + disable_catchup = definition.parameters.get('disable_catchup', None) + if twenty_hour_catchup and disable_catchup is True: + raise ValueError("Catch up values are mutually exclusive") + pass @@ -51,6 +61,18 @@ def collect_events(helper, ew): api_host = "https://dashboard.signalsciences.net" helper.log_info("email: %s" % global_email) helper.log_info("corp: %s" % global_corp_api_name) + + # Request / Read Timeouts + request_timeout = float(helper.get_arg("request_timeout")) + read_timeout = float(helper.get_arg('read_timeout')) + helper.log_info(f"request configuration is: request:{request_timeout}, read: {read_timeout}") + + # Config Declaration + twenty_hour_catchup = helper.get_arg('twenty_hour_catchup') + helper.log_info(f"twenty four hour catchup is: {twenty_hour_catchup}") + + disable_catchup = helper.get_arg('disable_catchup') + helper.log_info(f"disable catchup is: {disable_catchup}") def pull_events(current_site, delta, key=None): site_name = current_site @@ -63,7 +85,7 @@ def pull_events(current_site, delta, key=None): ) else: (until_time, from_time) = get_until_time( - helper, last_run_until, delta, five_min_offset=False + helper, last_run_until, delta, twenty_hour_catchup=twenty_hour_catchup, catchup_disabled=disable_catchup, five_min_offset=False ) if from_time is None or from_time > until_time: helper.log_info(f"{from_time} >= current now time, skipping run") @@ -109,6 +131,8 @@ def pull_events(current_site, delta, key=None): global_email=global_email, global_corp_api_name=global_corp_api_name, current_site=current_site, + request_timeout=request_timeout, + read_timeout=read_timeout, ) config.headers = { "Content-type": "application/json", @@ -194,4 +218,4 @@ def pull_events(current_site, delta, key=None): end = timer() total_time = end - start time_result = round(total_time, 2) - helper.log_info("Total Script Time: %s seconds" % time_result) + helper.log_info("Total Script Time: %s seconds" % time_result) \ No newline at end of file
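Both input modules above now pass the two timeouts through to the HTTP layer as a (connect, read) tuple, which is the convention the requests library uses: the first value bounds establishing the TCP connection, the second bounds the wait between bytes of the response. A minimal sketch outside of Splunk, using plain requests against the dashboard host from the TA config:

import requests

request_timeout, read_timeout = 60.0, 60.0  # the shipped defaults, in seconds
resp = requests.get(
    "https://dashboard.signalsciences.net",
    timeout=(request_timeout, read_timeout),  # (connect timeout, read timeout)
)
print(resp.status_code)

Inside the TA the same tuple is handed to helper.send_http_request(), as shown in the sigsci_helper.py changes below.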
helper.log_info("Total Script Time: %s seconds" % time_result) \ No newline at end of file diff --git a/sigsci_TA_for_splunk/bin/input_module_SigsciRequests.py b/sigsci_TA_for_splunk/bin/input_module_SigsciRequests.py index 93d9329..7ca9775 100644 --- a/sigsci_TA_for_splunk/bin/input_module_SigsciRequests.py +++ b/sigsci_TA_for_splunk/bin/input_module_SigsciRequests.py @@ -2,7 +2,7 @@ from timeit import default_timer as timer import time from datetime import datetime, timezone, timedelta -from sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time +from sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time, validate_timeouts """ IMPORTANT @@ -14,9 +14,27 @@ # def use_single_instance_mode(): # return True +def validate_input(helper,definition): + request_limit = int(definition.parameters.get("request_limit", None)) + if request_limit is None or request_limit == "": + raise ValueError('The request limit cannot be blank') + if request_limit <= 0: + raise ValueError('The request limit cannot be 0') + if request_limit > 1000: + raise ValueError('Request Limit cannot be greater than 1000') + + # Read Timeout passed to send_http_request. Type: float. + # https://docs.splunk.com/Documentation/AddonBuilder/4.1.4/UserGuide/PythonHelperFunctions + # We do this per input module as splunk provides no way to validate global configuration arguments :') + request_timeout = definition.parameters.get("request_timeout", None) + read_timeout = definition.parameters.get("read_timeout", None) + validate_timeouts(request_timeout, read_timeout) + + twenty_hour_catchup = definition.parameters.get('twenty_hour_catchup', None) + disable_catchup = definition.parameters.get('disable_catchup', None) + if twenty_hour_catchup and disable_catchup is True: + raise ValueError(f"Catch up values are mutually exclusive") -def validate_input(helper, definition): - # This example accesses the modular input variable site_name = definition.parameters.get("site_api_name", None) if site_name is None or site_name == "": msg = "The site_name can not be empty" @@ -43,19 +61,35 @@ def collect_events(helper, ew): helper.set_log_level(loglevel) # Proxy setting configuration # proxy_settings = helper.get_proxy() + api_host = "https://dashboard.signalsciences.net" global_email = helper.get_global_setting("email") global_api_token = helper.get_global_setting("api_token") global_corp_api_name = helper.get_global_setting("corp_api_name") - api_host = "https://dashboard.signalsciences.net" helper.log_info("email: %s" % global_email) helper.log_info("corp: %s" % global_corp_api_name) + + # Request / Read Timeouts + request_timeout = float(helper.get_arg("request_timeout")) + read_timeout = float(helper.get_arg('read_timeout')) + helper.log_info(f"request configuration is: request:{request_timeout}, read: {read_timeout}") + + # Config declaration. 
+ twenty_hour_catchup = helper.get_arg('twenty_hour_catchup') + helper.log_info(f"twenty four hour catchup is: {twenty_hour_catchup}") + + disable_catchup = helper.get_arg('disable_catchup') + helper.log_info(f"disable catchup is: {disable_catchup}") + + attack_and_anomaly_signals_only = helper.get_arg('attack_and_anomaly_signals_only') + helper.log_info(f"attack signals only is: {attack_and_anomaly_signals_only}") def pull_requests(helper, current_site, delta, key=None): site_name = current_site last_name = f"requests_last_until_time_{current_site}" last_run_until = helper.get_check_point(last_name) - - + request_limit = helper.get_arg('request_limit') + helper.log_info(f"request limit: {request_limit}") + if last_run_until is None: helper.log_info("no last_run_time found in checkpoint state") helper.log_debug("get_from_until") @@ -66,7 +100,7 @@ def pull_requests(helper, current_site, delta, key=None): helper.log_info(f"last_run_until found in state: {last_run_until}") helper.log_debug("get_until") until_time, from_time = get_until_time( - helper, last_run_until, delta, five_min_offset=True + helper, last_run_until, delta, twenty_hour_catchup, disable_catchup, five_min_offset=True ) if from_time is None: @@ -79,7 +113,7 @@ def pull_requests(helper, current_site, delta, key=None): f"from_time {from_time} >= until_time {until_time}, skipping run" ) return - + helper.log_info("SiteName: %s" % site_name) helper.log_info(f"Start Period: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(from_time))}") helper.log_info(f"End Period: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(until_time))}") @@ -101,10 +135,53 @@ def pull_requests(helper, current_site, delta, key=None): # Loop across all the data and output it in one big JSON object url = ( f"{api_host}/api/v0/corps/{global_corp_api_name}" - f"/sites/{site_name}/feed/requests?" 
- f"from={from_time}&until={until_time}" + f"/sites/{site_name}/feed/requests" + f"?limit={request_limit}" + f"&from={from_time}&until={until_time}" ) - helper.log_info("Pulling requests from requests API") + if attack_and_anomaly_signals_only: + attack_signals = [ + "USERAGENT", + "AWS-SSRF", + "BACKDOOR", + "CMDEXE", + "SQLI", + "TRAVERSAL", + "XSS", + "XXE" + ] + anomaly_signals = [ + "2FA-DISABLED", "2FA-CHANGED", "ABNORMALPATH", "ADDRESS-CHANGED", "ALLOWED", + "BHH", "BLOCKED", "BODY-PARSER-EVASION", "CODEINJECTION", "COMPRESSED", + "CC-VAL-ATTEMPT", "CC-VAL-FAILURE", "CC-VAL-SUCCESS", "CVE-2017-5638", + "CVE-2017-7269", "CVE-2017-9805", "CVE-2018-11776", "CVE-2018-15961", + "CVE-2018-9206", "CVE-2019-0192", "CVE-2019-0193", "CVE-2019-0232", + "CVE-2019-11580", "CVE-2019-14234", "CVE-2019-16759", "CVE-2019-2725", + "CVE-2019-3396", "CVE-2019-3398", "CVE-2019-5418", "CVE-2019-6340", + "CVE-2019-8394", "CVE-2019-8451", "CVE-2021-26084", "CVE-2021-26855", + "CVE-2021-40438", "CVE-2021-44228", "CVE-2021-44228-STRICT", + "CVE-2022-22963", "CVE-2022-22965", "CVE-2022-26134", "CVE-2022-42889", + "CVE-2023-34362", "CVE-2023-38218", "DATACENTER", "DOUBLEENCODING", + "EMAIL-CHANGED", "EMAIL-VALIDATION", "FORCEFULBROWSING", "GC-VAL-ATTEMPT", + "GC-VAL-FAILURE", "GC-VAL-SUCCESS", "GRAPHQL-API", "GRAPHQL-DUPLICATE-VARIABLES", + "GRAPHQL-IDE", "GRAPHQL-INTROSPECTION", "GRAPHQL-DEPTH", + "GRAPHQL-MISSING-REQUIRED-OPERATION-NAME", + "GRAPHQL-UNDEFINED-VARIABLES", "HTTP403", "HTTP404", "HTTP429", + "HTTP4XX", "HTTP500", "HTTP503", "HTTP5XX", "IMPOSTOR", "INFO-VIEWED", + "INSECURE-AUTH", "NOTUTF8", "INVITE-FAILURE", "INVITE-ATTEMPT", + "INVITE-SUCCESS", "JSON-ERROR", "KBA-CHANGED", "LOGINATTEMPT", + "LOGINDISCOVERY", "LOGINFAILURE", "LOGINSUCCESS", "MALFORMED-DATA", + "SANS", "MESSAGE-SENT", "NO-CONTENT-TYPE", "NOUA", "NULLBYTE", + "OOB-DOMAIN", "PW-CHANGED", "PW-RESET-ATTEMPT", "PW-RESET-FAILURE", + "PW-RESET-SUCCESS", "PRIVATEFILE", "rate-limit", "REGATTEMPT", "REGFAILURE", + "REGSUCCESS", "RSRC-ID-ENUM-ATTEMPT", "RSRC-ID-ENUM-FAILURE", + "RSRC-ID-ENUM-SUCCESS", "RESPONSESPLIT", "SCANNER", "SIGSCI-IP", + "TORNODE", "WRONG-API-CLIENT", "USER-ID-ENUM-ATTEMPT", + "USER-ID-ENUM-FAILURE", "USER-ID-ENUM-SUCCESS", "WEAKTLS", "XML-ERROR" + ] + attack_tags = ",".join(attack_signals) + anomaly_tags = ",".join(anomaly_signals) + url = f"{url}&tags={attack_tags},{anomaly_tags}" config = Config( url=url, api_host=api_host, @@ -113,6 +190,8 @@ def pull_requests(helper, current_site, delta, key=None): global_email=global_email, global_corp_api_name=global_corp_api_name, current_site=current_site, + request_timeout=request_timeout, + read_timeout=read_timeout, ) config.headers = { "Content-type": "application/json", @@ -131,7 +210,7 @@ def pull_requests(helper, current_site, delta, key=None): f"No events to write, saving checkpoint to value:{until_time}" ) write_start = timer() - event_count = 0 + event_count = 0 for current_event in all_requests: if key is None: source_type = helper.get_sourcetype() @@ -157,13 +236,13 @@ def pull_requests(helper, current_site, delta, key=None): try: ew.write_event(event) event_count += 1 # increment the count for successful events to not spam debug. - helper.save_check_point(last_name, until_time) except Exception as e: helper.log_error(f"error writing event: {e}") helper.log_error(event) raise e - if event_count != 0: # We save the checkpoint earlier on 0 events. 
- helper.log_info(f"{event_count} events written, saving checkpoint: {until_time}") + if event_count != 0: # We save the checkpoint earlier on 0 events. + helper.log_info(f"{event_count} events written, saving checkpoint: {until_time}") + helper.save_check_point(last_name, until_time) write_end = timer() write_time = write_end - write_start write_time_result = round(write_time, 2) @@ -191,4 +270,4 @@ def pull_requests(helper, current_site, delta, key=None): end = timer() total_time = end - start time_result = round(total_time, 2) - helper.log_info("Total Script Time: %s seconds" % time_result) + helper.log_info("Total Script Time: %s seconds" % time_result) \ No newline at end of file diff --git a/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciActivity.py b/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciActivity.py index 99d162b..868de4e 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciActivity.py +++ b/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciActivity.py @@ -34,10 +34,34 @@ ) ), field.RestField( - 'place_holder', + 'disable_catchup', + required=False, + encrypted=False, + default=True, + validator=None + ), + field.RestField( + 'twenty_hour_catchup', required=False, encrypted=False, default=None, + validator=None + ), + field.RestField( + 'request_timeout', + required=True, + encrypted=False, + default='60', + validator=validator.String( + min_len=0, + max_len=8192, + ) + ), + field.RestField( + 'read_timeout', + required=True, + encrypted=False, + default='60', validator=validator.String( min_len=0, max_len=8192, diff --git a/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciEvent.py b/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciEvent.py index 8f11ab7..3699cb4 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciEvent.py +++ b/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciEvent.py @@ -43,6 +43,40 @@ max_len=8192, ) ), + field.RestField( + 'disable_catchup', + required=False, + encrypted=False, + default=True, + validator=None + ), + field.RestField( + 'twenty_hour_catchup', + required=False, + encrypted=False, + default=None, + validator=None + ), + field.RestField( + 'request_timeout', + required=True, + encrypted=False, + default='60', + validator=validator.String( + min_len=0, + max_len=8192, + ) + ), + field.RestField( + 'read_timeout', + required=True, + encrypted=False, + default='60', + validator=validator.String( + min_len=0, + max_len=8192, + ) + ), field.RestField( 'disabled', diff --git a/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciRequests.py b/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciRequests.py index 1e513c3..762971f 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciRequests.py +++ b/sigsci_TA_for_splunk/bin/sigsci_TA_for_splunk_rh_SigsciRequests.py @@ -43,6 +43,57 @@ max_len=8192, ) ), + field.RestField( + 'request_limit', + required=True, + encrypted=False, + default='1000', + validator=validator.String( + min_len=0, + max_len=8192, + ) + ), + field.RestField( + 'disable_catchup', + required=False, + encrypted=False, + default=True, + validator=None + ), + field.RestField( + 'twenty_hour_catchup', + required=False, + encrypted=False, + default=None, + validator=None + ), + field.RestField( + 'attack_and_anomaly_signals_only', + required=False, + encrypted=False, + default=None, + validator=None + ), + field.RestField( + 'request_timeout', + required=True, + encrypted=False, + default='60', + validator=validator.String( + min_len=0, + 
max_len=8192, + ) + ), + field.RestField( + 'read_timeout', + required=True, + encrypted=False, + default='60', + validator=validator.String( + min_len=0, + max_len=8192, + ) + ), field.RestField( 'disabled', diff --git a/sigsci_TA_for_splunk/bin/sigsci_helper.py b/sigsci_TA_for_splunk/bin/sigsci_helper.py index aa91749..d96a715 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_helper.py +++ b/sigsci_TA_for_splunk/bin/sigsci_helper.py @@ -1,17 +1,42 @@ -import json +import json, sys, os from datetime import datetime, timedelta, timezone from timeit import default_timer as timer +from urllib.parse import urlparse, parse_qs import time import requests + +def validate_timeouts(request_timeout, read_timeout): + # Read Timeout passed to send_http_request. Type: float. + # https://docs.splunk.com/Documentation/AddonBuilder/4.1.4/UserGuide/PythonHelperFunctions + # We do this per input module as splunk provides no way to validate global configuration arguments. + if request_timeout is None: + raise ValueError("Request timeout configuration is missing") + try: + request_timeout = float(request_timeout) + except ValueError: + raise ValueError(f"Invalid request timeout value: {request_timeout}") + if request_timeout > 300.0 or request_timeout <= 0: + raise ValueError(f"Request timeout must be between 0 and 300 seconds, got {request_timeout}") + + # Read Timeout passed to send_http_request. Type: float. + if read_timeout is None: + raise ValueError("Read timeout configuration is missing") + try: + read_timeout = float(read_timeout) + except ValueError: + raise ValueError(f"Invalid read timeout value: {read_timeout}") + if read_timeout > 300.0 or read_timeout <= 0: + raise ValueError(f"Read timeout must be between 0 and 300 seconds, got {read_timeout}") + def check_response( - code, - response_text, - global_email, - global_corp_api_name, - from_time=None, - until_time=None, - current_site=None, + code, + response_text, + global_email, + global_corp_api_name, + from_time=None, + until_time=None, + current_site=None, ): success = False base_msg = { @@ -30,6 +55,9 @@ def check_response( else: base_msg["error"] = "BAD API Request" base_msg["msg"] = "bad-request" + if code == 414: + base_msg["error"] = "request uri size exceeded" + base_msg["msg"] = "request-uri-too-large" elif code == 500: base_msg["error"] = "Internal Server Error" base_msg["msg"] = "internal-error" @@ -38,7 +66,10 @@ def check_response( "Unauthorized. 
Incorrect credentials or lack of permissions" ) base_msg["msg"] = "unauthorized" - elif 400 <= code <= 599 and code != 400 and code != 500 and code != 401: + elif code == 429: + base_msg["error"] = "Too Many Requests" + base_msg["msg"] = "too-many-requests" + elif code is not None and 400 <= code <= 599 and code not in [400, 500, 401]: base_msg["error"] = "Unknown Error" base_msg["msg"] = "other-error" else: @@ -46,19 +77,20 @@ return success, base_msg -def get_request_data(url, headers, helper): - method = "GET" +def get_request_data(url, method, payload, headers, request_timeout, read_timeout, helper): + response_code = None + response_error = "Initial error state" try: response_raw = helper.send_http_request( url, method, parameters=None, - payload=None, + payload=payload, headers=headers, cookies=None, verify=True, cert=None, - timeout=None, + timeout=(request_timeout, read_timeout), use_proxy=True, ) response_code = response_raw.status_code @@ -72,13 +104,13 @@ helper.log_error(error) except Exception as error: data = {"data": []} - helper.log_info("Unable to parse API Response") + helper.log_info("HTTP Request Failed") helper.log_error(error) - response_code = 500 - response_error = "Unable to parse API Response" + response_error = "Request Failure" return data, response_code, response_error + def timestamp_sanitise(_time): return _time - _time % 60 @@ -87,7 +119,6 @@ def get_from_and_until_times(helper, delta, five_min_offset=False): until_time = int(time.time()) helper.log_info(f"Time Now: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(int(time.time())))}") - # Check if five_min_offset is needed. if five_min_offset: until_time -= 5 * 60 # Subtract 5 minutes in seconds @@ -107,9 +138,10 @@ SECONDS_IN_DAY= 24 * 60 * 60 -def get_until_time(helper, from_time, interval, five_min_offset=False): +def get_until_time(helper, from_time, delta, twenty_hour_catchup, catchup_disabled, five_min_offset=False): # Get current epoch time rounded down to nearest minute now = timestamp_sanitise(int(time.time())) + from_time = timestamp_sanitise(from_time) helper.log_info(f"Time Now: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(now))}") if five_min_offset: @@ -119,26 +151,54 @@ # Calculate the difference between now and the from_time time_difference = now - from_time - - # If the difference is more than 24 hours (in seconds), reset the from_time - if time_difference > SECONDS_IN_DAY: # 24 hours in seconds - helper.log_info("Adjusting from_time to 24 hours ago") - adjusted_from_time = now - SECONDS_IN_DAY # Subtract 24 hours in seconds - helper.log_info(f"Previous Run: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(from_time))}") - helper.log_info(f"Adjusted from_time: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(adjusted_from_time))}") - return until_time, adjusted_from_time + + # If catch-up is disabled, don't catch up at all. + # We evaluate by checking if the time difference is greater than the delta added to itself. + if catchup_disabled: + if time_difference > delta + delta: + helper.log_debug("Last checkpoint is greater than current delta.
Not attempting to catch up and resetting from delta.") + until_time, from_time = get_from_and_until_times( + helper, delta, five_min_offset=True + ) + return until_time, from_time + + # If the difference is more than 24 hours (in seconds). + if time_difference > SECONDS_IN_DAY: + helper.log_info("Last checkpoint is over 24 hours ago, due to API limitations this cannot be greater than 24 hours and must be reset.") + if twenty_hour_catchup: + helper.log_info("Setting from_time to 24 hours ago.") + adjusted_from_time = now - SECONDS_IN_DAY # Subtract 24 hours in seconds + helper.log_info(f"Previous Run: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(from_time))}") + helper.log_info(f"Adjusted from_time: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(adjusted_from_time))}") + return until_time, adjusted_from_time + else: + helper.log_info("Last checkpoint was over 24 hours ago, resetting the time from delta.") + # Return times as if checkpoint was none. + until_time, from_time = get_from_and_until_times( + helper, delta, five_min_offset=True + ) + return until_time, from_time return until_time, from_time def get_results(title, helper, config): loop = True counter = 1 + method = "GET" + payload = None while loop: pulled_events = [] helper.log_info("Processing page %s" % counter) start_page = timer() + response_result, response_code, response_error = get_request_data( - config.url, config.headers, helper + config.url, + method, + payload, + config.headers, + config.request_timeout, + config.read_timeout, + helper ) pulled, request_details = check_response( @@ -163,7 +223,13 @@ def get_results(title, helper, config): else: response = response_result - number_requests_per_page = len(response["data"]) + try: + number_requests_per_page = len(response["data"]) + except KeyError: + number_requests_per_page = 0 + helper.log_error(f"Invalid response: {response_result}") + break + helper.log_info(f"Number of {title} for Page: {number_requests_per_page}") for data in response["data"]: @@ -207,8 +273,25 @@ def get_results(title, helper, config): helper.log_info(f"Total Page Time: {page_time_result} seconds") loop = False elif next_url is not None: - config.url = config.api_host + next_url + # The NextID is too large to put into query parameters, so extract the value and put it in a form body. + # See: SDS-1720 + if "feed/requests" in config.url: + # Remove any additional query parameters past the request_limit. + # These cannot contain `?` so safe to use as a split separator. + config.url = config.url.split('&', 1)[0] + method = "POST" + config.headers['content-type'] = "application/x-www-form-urlencoded" + next_value = next_url.split('?', 1)[1] if '?' 
in next_url else '' + query_dict = parse_qs(next_value) + next_value = query_dict.get('next', [None])[0] + payload = f"next={next_value}" + helper.log_debug("payload: %s" % payload) + else: + config.url = config.api_host + next_url + + helper.log_debug("next url: %s" % config.url) helper.log_info("Finished Page %s" % counter) + counter += 1 end_page = timer() page_time = end_page - start_page @@ -222,30 +305,37 @@ class Config: api_host: str url: str + method: str headers: dict - events: dict - from_time: int - until_time: int + events: list + from_time: str + until_time: str global_email: str global_corp_api_name: str current_site: str user_agent_version: str user_agent_string: str event_ids: list + request_timeout: float + read_timeout: float def __init__( - self, - api_host=None, - url=None, - headers=None, - from_time=None, - until_time=None, - global_email=None, - global_corp_api_name=None, - current_site=None, + self, + api_host=None, + url=None, + method=None, + headers=None, + from_time=None, + until_time=None, + global_email=None, + global_corp_api_name=None, + current_site=None, + request_timeout=None, + read_timeout=None, ): self.api_host = api_host self.url = url + self.method = method self.headers = headers self.events = [] self.from_time = from_time @@ -254,7 +344,9 @@ def __init__( self.global_corp_api_name = global_corp_api_name self.current_site = current_site self.event_ids = [] - self.user_agent_version = "1.0.37" + self.request_timeout = request_timeout + self.read_timeout = read_timeout + self.user_agent_version = "1.0.38" self.user_agent_string = ( f"TA-sigsci-waf/{self.user_agent_version} " f"(PythonRequests {requests.__version__})" )
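To illustrate the POST-body pagination added above: when the requests feed returns a follow-up URL, the next token is extracted from its query string and re-sent as a form body rather than as a query parameter. A small standalone sketch; the token value here is made up, real ones come from the feed response:

from urllib.parse import parse_qs

next_url = "/api/v0/corps/acme/sites/www/feed/requests?next=eyJrIjoidiJ9"  # hypothetical
query = next_url.split('?', 1)[1] if '?' in next_url else ''
token = parse_qs(query).get('next', [None])[0]
payload = f"next={token}"  # sent with content-type application/x-www-form-urlencoded
print(payload)             # -> next=eyJrIjoidiJ9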
diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/INSTALLER b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/LICENSE b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/LICENSE new file mode 100644 index 0000000..0a64774 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/METADATA b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/METADATA new file mode 100644 index 0000000..07f4991 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/METADATA @@ -0,0 +1,69 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2023.7.22 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust.
+ + diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/RECORD new file mode 100644 index 0000000..3742222 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/RECORD @@ -0,0 +1,11 @@ +certifi-2023.7.22.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2023.7.22.dist-info/LICENSE,sha256=oC9sY4-fuE0G93ZMOrCF2K9-2luTwWbaVDEkeQd8b7A,1052 +certifi-2023.7.22.dist-info/METADATA,sha256=oyc8gd32SOVo0IGolt8-bR7FnZ9Z99GoHoGE6ACcvFA,2191 +certifi-2023.7.22.dist-info/RECORD,, +certifi-2023.7.22.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +certifi-2023.7.22.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=L_j-d0kYuA_MzA2_2hraF1ovf6KT6DTquRdV3paQwOk,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/cacert.pem,sha256=eU0Dn_3yd8BH4m8sfVj4Glhl2KDrcCSg-sEWT-pNJ88,281617 +certifi/core.py,sha256=lhewz0zFb2b4ULsQurElmloYwQoecjWzPqY67P8T7iM,4219 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/WHEEL new file mode 100644 index 0000000..5bad85f --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/top_level.txt b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi-2023.7.22.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi/__init__.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi/__init__.py index a3546f1..8ce89ce 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi/__init__.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2022.12.07" +__version__ = "2023.07.22" diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi/cacert.pem b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi/cacert.pem index df9e4e3..0212369 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi/cacert.pem +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/certifi/cacert.pem @@ -791,34 +791,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- 
-MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - # Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. # Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. # Label: "SecureSign RootCA11" @@ -1676,50 +1648,6 @@ HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= -----END CERTIFICATE----- -# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM -ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX 
-U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK -vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD -y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - # Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Label: "T-TeleSec GlobalRoot Class 2" @@ -4397,73 +4325,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR -----END CERTIFICATE----- -# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Label: "E-Tugra Global Root CA RSA v3" -# Serial: 75951268308633135324246244059508261641472512052 -# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4 -# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9 -# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2 ------BEGIN CERTIFICATE----- -MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL -BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt -VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw -JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw -OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG -QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1 -Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD -QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7 -7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx -uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8 -7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/ -rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL -l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG -wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4 -znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO -M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK -5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH -nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo -DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy -tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL -BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ -6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18 -Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ 
-3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk -vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9 -9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ -mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA -VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF -9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM -moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8 -bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ ------END CERTIFICATE----- - -# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Label: "E-Tugra Global Root CA ECC v3" -# Serial: 218504919822255052842371958738296604628416471745 -# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64 -# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84 -# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13 ------BEGIN CERTIFICATE----- -MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw -gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn -cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD -VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2 -NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r -YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh -IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF -Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ -KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK -fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB -Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C -MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp -ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6 -7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx -vmjkI6TZraE3 ------END CERTIFICATE----- - # Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. # Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. 
# Label: "Security Communication RootCA3" @@ -4525,3 +4386,250 @@ BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu 9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= -----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 +eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ 
+TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L 
+HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# 
Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ 
+kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/charset_normalizer-2.0.12.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/charset_normalizer-2.0.12.dist-info/RECORD index d5a8e21..6a48c6c 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/charset_normalizer-2.0.12.dist-info/RECORD +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/charset_normalizer-2.0.12.dist-info/RECORD @@ -3,6 +3,7 @@ charset_normalizer-2.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCr charset_normalizer-2.0.12.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 charset_normalizer-2.0.12.dist-info/METADATA,sha256=eX-U3s7nb6wcvXZFyM1mdBf1yz4I0msVBgNvLEscAbo,11713 charset_normalizer-2.0.12.dist-info/RECORD,, +charset_normalizer-2.0.12.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 charset_normalizer-2.0.12.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 charset_normalizer-2.0.12.dist-info/entry_points.txt,sha256=5AJq_EPtGGUwJPgQLnBZfbVr-FYCIwT0xP7dIEZO3NI,77 charset_normalizer-2.0.12.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/charset_normalizer-2.0.12.dist-info/REQUESTED b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/charset_normalizer-2.0.12.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/INSTALLER b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/LICENSE b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ 
b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/METADATA b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/METADATA new file mode 100644 index 0000000..6272503 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/METADATA @@ -0,0 +1,134 @@ +Metadata-Version: 2.1 +Name: importlib-metadata +Version: 6.7.0 +Summary: Read metadata from Python packages +Home-page: https://github.com/python/importlib_metadata +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: zipp (>=0.5) +Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8" +Provides-Extra: docs +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' +Provides-Extra: perf +Requires-Dist: ipython ; extra == 'perf' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' +Requires-Dist: pytest-ruff ; extra == 'testing' +Requires-Dist: packaging ; extra == 'testing' +Requires-Dist: pyfakefs ; extra == 'testing' +Requires-Dist: flufl.flake8 ; extra == 'testing' +Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg + :target: https://pypi.org/project/importlib_metadata + +.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg + +.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg + :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest + :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2023-informational + :target: https://blog.jaraco.com/skeleton + +.. 
image:: https://tidelift.com/badges/package/pypi/importlib-metadata + :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme + +Library to access the metadata for a Python package. + +This package supplies third-party access to the functionality of +`importlib.metadata `_ +including improvements added to subsequent Python versions. + + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - importlib_metadata + - stdlib + * - 6.5 + - 3.12 + * - 4.13 + - 3.11 + * - 4.6 + - 3.10 + * - 1.4 + - 3.8 + + +Usage +===== + +See the `online documentation `_ +for usage details. + +`Finder authors +`_ can +also add support for custom package installers. See the above documentation +for details. + + +Caveats +======= + +This project primarily supports third-party packages installed by PyPA +tools (or other conforming packages). It does not support: + +- Packages in the stdlib. +- Packages installed without metadata. + +Project details +=============== + + * Project home: https://github.com/python/importlib_metadata + * Report bugs at: https://github.com/python/importlib_metadata/issues + * Code hosting: https://github.com/python/importlib_metadata + * Documentation: https://importlib-metadata.readthedocs.io/ + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ + +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. 
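The importlib_metadata 6.7.0 files vendored here back-port the stdlib importlib.metadata API to older Pythons, as the METADATA above describes. A minimal sketch of the typical entry points, with an illustrative package name (nothing in this diff calls the library this way):

    from importlib_metadata import PackageNotFoundError, version

    # Resolve an installed distribution's version at runtime; a missing
    # distribution raises PackageNotFoundError rather than returning None.
    try:
        print(version("requests"))
    except PackageNotFoundError:
        print("requests is not installed")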
diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/RECORD new file mode 100644 index 0000000..471778f --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/RECORD @@ -0,0 +1,16 @@ +importlib_metadata-6.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_metadata-6.7.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +importlib_metadata-6.7.0.dist-info/METADATA,sha256=JDrzuuLRE3CxIRXLeXdZGGFDrVlEXUvt-chm0-s-TtI,4878 +importlib_metadata-6.7.0.dist-info/RECORD,, +importlib_metadata-6.7.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +importlib_metadata-6.7.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 +importlib_metadata/__init__.py,sha256=MQx_tU_lZg-7U91wdrlrsDt0MGPXkpraLevB8LO1NNc,30724 +importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454 +importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 +importlib_metadata/_compat.py,sha256=xaiD8pwYYPCWkVgR30411iT4OmLbSbSAigzhp0nTROw,1735 +importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895 +importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068 +importlib_metadata/_meta.py,sha256=I2AuaUMr5a6cTdZleV9WpyqUCSooqqV-zSzr1qn7FMw,1615 +importlib_metadata/_py39compat.py,sha256=2Tk5twb_VgLCY-1NEAQjdZp_S9OFMC-pUzP2isuaPsQ,1098 +importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166 +importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/WHEEL new file mode 100644 index 0000000..1f37c02 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/top_level.txt b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/top_level.txt new file mode 100644 index 0000000..bbb0754 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata-6.7.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata/__init__.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata/__init__.py index 281cfb0..8147d2f 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata/__init__.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata/__init__.py @@ -13,7 +13,6 @@ import functools import itertools import posixpath -import contextlib import collections from . import _adapters, _meta, _py39compat @@ -974,13 +973,42 @@ def _top_level_declared(dist): return (dist.read_text('top_level.txt') or '').split() +def _topmost(name: PackagePath) -> Optional[str]: + """ + Return the top-most parent as long as there is a parent. 
+ """ + top, *rest = name.parts + return top if rest else None + + +def _get_toplevel_name(name: PackagePath) -> str: + """ + Infer a possibly importable module name from a name presumed on + sys.path. + + >>> _get_toplevel_name(PackagePath('foo.py')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo.pyc')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo/__init__.py')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo.pth')) + 'foo.pth' + >>> _get_toplevel_name(PackagePath('foo.dist-info')) + 'foo.dist-info' + """ + return _topmost(name) or ( + # python/typeshed#10328 + inspect.getmodulename(name) # type: ignore + or str(name) + ) + + def _top_level_inferred(dist): - opt_names = { - f.parts[0] if len(f.parts) > 1 else inspect.getmodulename(f) - for f in always_iterable(dist.files) - } + opt_names = set(map(_get_toplevel_name, always_iterable(dist.files))) - @pass_none def importable_name(name): return '.' not in name diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata/_compat.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata/_compat.py index 638e779..b7abd09 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata/_compat.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/importlib_metadata/_compat.py @@ -56,14 +56,6 @@ class NullFinder: def find_spec(*args, **kwargs): return None - # In Python 2, the import system requires finders - # to have a find_module() method, but this usage - # is deprecated in Python 3 in favor of find_spec(). - # For the purposes of this finder (i.e. being present - # on sys.meta_path but having no other import - # system functionality), the two methods are identical. 
- find_module = find_spec - def pypy_partial(val): """ diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsl-0.2.4.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsl-0.2.4.dist-info/RECORD index 3487a2e..3f15993 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsl-0.2.4.dist-info/RECORD +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsl-0.2.4.dist-info/RECORD @@ -3,7 +3,7 @@ jsl-0.2.4.dist-info/LICENSE,sha256=SOcEeWBevO0ulaPJ1ZbwQHJrPKCqwjXQ6tUAo_LrYmE,1 jsl-0.2.4.dist-info/METADATA,sha256=Hoxdaj4BiIHCDAJfABf-ZKi-mpICjqGFzCXuG6Ve-ag,3655 jsl-0.2.4.dist-info/RECORD,, jsl-0.2.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jsl-0.2.4.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +jsl-0.2.4.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92 jsl-0.2.4.dist-info/top_level.txt,sha256=5pe24rVnpqKlN1x1SAB6vpgck0EQXzb1HD-STjLrCXE,4 jsl/__init__.py,sha256=Hav-h60_x4Z_1-SPwtgXo6HFmv2TAay_3xWfootT8Hg,536 jsl/_compat/__init__.py,sha256=Wfz6h02kZsiVvzNdZALq2RTa7XpEbeAjeiE8CFTzC8Y,2118 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsl-0.2.4.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsl-0.2.4.dist-info/WHEEL index 1f37c02..7e68873 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsl-0.2.4.dist-info/WHEEL +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsl-0.2.4.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) +Generator: bdist_wheel (0.41.2) Root-Is-Purelib: true Tag: py3-none-any diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/INSTALLER b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/LICENSE b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/METADATA b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/METADATA
new file mode 100644
index 0000000..b50df98
--- /dev/null
+++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/METADATA
@@ -0,0 +1,361 @@
+Metadata-Version: 2.1
+Name: jsonpath-ng
+Version: 1.6.0
+Summary: A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming.
+Home-page: https://github.com/h2non/jsonpath-ng
+Author: Tomas Aparicio
+Author-email: tomas@aparicio.me
+License: Apache 2.0
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+License-File: LICENSE
+Requires-Dist: ply
+
+Python JSONPath Next-Generation |Build Status| |PyPI|
+=====================================================
+
+A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic
+and binary comparison operators, as defined in the original `JSONPath proposal`_.
+
+This package merges both `jsonpath-rw`_ and `jsonpath-rw-ext`_ and
+provides several AST API enhancements, such as the ability to update or remove nodes in the tree.
+
+About
+-----
+
+This library provides a robust and significantly extended implementation
+of JSONPath for Python. It is tested with CPython 2.6, 2.7 & 3.x.
+
+This library differs from other JSONPath implementations in that it is a
+full *language* implementation, meaning the JSONPath expressions are
+first class objects, easy to analyze, transform, parse, print, and
+extend.
+
+Quick Start
+-----------
+
+To install, use pip:
+
+.. code:: bash
+
+    $ pip install --upgrade jsonpath-ng
+
+
+Usage
+-----
+
+Basic examples:
+
+.. code:: python
+
+    $ python
+
+    >>> from jsonpath_ng import jsonpath, parse
+
+    # A robust parser, not just a regex.
(Makes powerful extensions possible; see below) + >>> jsonpath_expr = parse('foo[*].baz') + + # Extracting values is easy + >>> [match.value for match in jsonpath_expr.find({'foo': [{'baz': 1}, {'baz': 2}]})] + [1, 2] + + # Matches remember where they came from + >>> [str(match.full_path) for match in jsonpath_expr.find({'foo': [{'baz': 1}, {'baz': 2}]})] + ['foo.[0].baz', 'foo.[1].baz'] + + # And this can be useful for automatically providing ids for bits of data that do not have them (currently a global switch) + >>> jsonpath.auto_id_field = 'id' + >>> [match.value for match in parse('foo[*].id').find({'foo': [{'id': 'bizzle'}, {'baz': 3}]})] + ['foo.bizzle', 'foo.[1]'] + + # A handy extension: named operators like `parent` + >>> [match.value for match in parse('a.*.b.`parent`.c').find({'a': {'x': {'b': 1, 'c': 'number one'}, 'y': {'b': 2, 'c': 'number two'}}})] + ['number two', 'number one'] + + # You can also build expressions directly quite easily + >>> from jsonpath_ng.jsonpath import Fields + >>> from jsonpath_ng.jsonpath import Slice + + >>> jsonpath_expr_direct = Fields('foo').child(Slice('*')).child(Fields('baz')) # This is equivalent + + +Using the extended parser: + +.. code:: python + + $ python + + >>> from jsonpath_ng.ext import parse + + # A robust parser, not just a regex. (Makes powerful extensions possible; see below) + >>> jsonpath_expr = parse('foo[*].baz') + + +JSONPath Syntax +--------------- + +The JSONPath syntax supported by this library includes some additional +features and omits some problematic features (those that make it +unportable). In particular, some new operators such as ``|`` and +``where`` are available, and parentheses are used for grouping not for +callbacks into Python, since with these changes the language is not +trivially associative. Also, fields may be quoted whether or not they +are contained in brackets. + +Atomic expressions: + ++-----------------------+---------------------------------------------------------------------------------------------+ +| Syntax | Meaning | ++=======================+=============================================================================================+ +| ``$`` | The root object | ++-----------------------+---------------------------------------------------------------------------------------------+ +| ```this``` | The "current" object. 
|
++-----------------------+---------------------------------------------------------------------------------------------+
+| ```foo```             | More generally, this syntax allows "named operators" to extend JSONPath in arbitrary ways  |
++-----------------------+---------------------------------------------------------------------------------------------+
+| *field*               | Specified field(s), described below                                                         |
++-----------------------+---------------------------------------------------------------------------------------------+
+| ``[`` *field* ``]``   | Same as *field*                                                                             |
++-----------------------+---------------------------------------------------------------------------------------------+
+| ``[`` *idx* ``]``     | Array access, described below (this is always unambiguous with field access)                |
++-----------------------+---------------------------------------------------------------------------------------------+
+
+Jsonpath operators:
+
++-------------------------------------+------------------------------------------------------------------------------------+
+| Syntax                              | Meaning                                                                            |
++=====================================+====================================================================================+
+| *jsonpath1* ``.`` *jsonpath2*       | All nodes matched by *jsonpath2* starting at any node matching *jsonpath1*        |
++-------------------------------------+------------------------------------------------------------------------------------+
+| *jsonpath* ``[`` *whatever* ``]``   | Same as *jsonpath*\ ``.``\ *whatever*                                              |
++-------------------------------------+------------------------------------------------------------------------------------+
+| *jsonpath1* ``..`` *jsonpath2*      | All nodes matched by *jsonpath2* that descend from any node matching *jsonpath1*  |
++-------------------------------------+------------------------------------------------------------------------------------+
+| *jsonpath1* ``where`` *jsonpath2*   | Any nodes matching *jsonpath1* with a child matching *jsonpath2*                  |
++-------------------------------------+------------------------------------------------------------------------------------+
+| *jsonpath1* ``|`` *jsonpath2*       | Any nodes matching the union of *jsonpath1* and *jsonpath2*                       |
++-------------------------------------+------------------------------------------------------------------------------------+
+
+Field specifiers ( *field* ):
+
++-------------------------+-------------------------------------------------------------------------------------+
+| Syntax                  | Meaning                                                                             |
++=========================+=====================================================================================+
+| ``fieldname``           | the field ``fieldname`` (from the "current" object)                                 |
++-------------------------+-------------------------------------------------------------------------------------+
+| ``"fieldname"``         | same as above, for allowing special characters in the fieldname                     |
++-------------------------+-------------------------------------------------------------------------------------+
+| ``'fieldname'``         | ditto                                                                               |
++-------------------------+-------------------------------------------------------------------------------------+
+| ``*``                   | any field                                                                           |
++-------------------------+-------------------------------------------------------------------------------------+
+| *field* ``,`` *field*   | either of the named fields (you can always build equivalent jsonpath using ``|``)   |
++-------------------------+-------------------------------------------------------------------------------------+
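(Editor's aside: as a quick illustration of how the operators and field specifiers above compose, a minimal sketch using only the ``parse`` API shown earlier; the sample data is made up for illustration.)

.. code:: python

    >>> from jsonpath_ng import parse

    >>> data = {'a': {'baz': 1}, 'b': {'c': {'baz': 2}}}

    # ``..`` matches at any depth; sort the values since match order
    # is not guaranteed here
    >>> sorted(m.value for m in parse('$..baz').find(data))
    [1, 2]

    # ``|`` concatenates the matches of both operands, left side first
    >>> [m.value for m in parse('a.baz | b.c.baz').find(data)]
    [1, 2]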
+
+Array specifiers ( *idx* ):
+
++-----------------------------------------+---------------------------------------------------------------------------------------+
+| Syntax                                  | Meaning                                                                               |
++=========================================+=======================================================================================+
+| ``[``\ *n*\ ``]``                       | array index (may be comma-separated list)                                             |
++-----------------------------------------+---------------------------------------------------------------------------------------+
+| ``[``\ *start*\ ``?:``\ *end*\ ``?]``   | array slicing (note that *step* is unimplemented only due to lack of need thus far)   |
++-----------------------------------------+---------------------------------------------------------------------------------------+
+| ``[*]``                                 | any array index                                                                       |
++-----------------------------------------+---------------------------------------------------------------------------------------+
+
+Programmatic JSONPath
+---------------------
+
+If you are programming in Python and would like a more robust way to
+create JSONPath expressions that does not depend on a parser, it is very
+easy to do so directly, and here are some examples:
+
+- ``Root()``
+- ``Slice(start=0, end=None, step=None)``
+- ``Fields('foo', 'bar')``
+- ``Index(42)``
+- ``Child(Fields('foo'), Index(42))``
+- ``Where(Slice(), Fields('subfield'))``
+- ``Descendants(jsonpath, jsonpath)``
+
+
+Extras
+------
+
+- *Path data*: The result of ``JsonPath.find`` provides detailed context
+  and path data so it is easy to traverse to parent objects, print full
+  paths to pieces of data, and generate automatic ids.
+- *Automatic Ids*: If you set ``jsonpath_ng.auto_id_field`` to a value
+  other than None, then for any piece of data missing that field, it
+  will be replaced by the JSONPath to it, giving automatic unique ids
+  to any piece of data. These ids will take into account any ids
+  already present as well.
+- *Named operators*: Instead of using ``@`` to reference the current
+  object, this library uses ```this```. In general, any string
+  contained in backquotes can be made to be a new operator, currently
+  by extending the library.
+
+
+Extensions
+----------
+
+To use the extensions below you must import from `jsonpath_ng.ext`.
+
++--------------+-----------------------------------------------+
+| name         | Example                                       |
++==============+===============================================+
+| len          | - ``$.objects.`len```                         |
++--------------+-----------------------------------------------+
+| sub          | - ``$.field.`sub(/foo\\\\+(.*)/, \\\\1)```    |
+|              | - ``$.field.`sub(/regex/, replacement)```     |
++--------------+-----------------------------------------------+
+| split        | - ``$.field.`split(+, 2, -1)```               |
+|              | - ``$.field.`split(sep, segment, maxsplit)``` |
++--------------+-----------------------------------------------+
+| sorted       | - ``$.objects.`sorted```                      |
+|              | - ``$.objects[\\some_field]``                 |
+|              | - ``$.objects[\\some_field,/other_field]``    |
++--------------+-----------------------------------------------+
+| filter       | - ``$.objects[?(@some_field > 5)]``           |
+|              | - ``$.objects[?some_field = "foobar"]``       |
+|              | - ``$.objects[?some_field =~ "foobar"]``      |
+|              | - ``$.objects[?some_field > 5 & other < 2]``  |
+|              |                                               |
+|              | Supported operators:                          |
+|              | - Equality: ==, =, !=                         |
+|              | - Comparison: >, >=, <, <=                    |
+|              | - Regex match: =~                             |
+|              |                                               |
+|              | Combine multiple criteria with '&'.           |
+|              |                                               |
+|              | Properties can only be compared to static     |
+|              | values.                                       |
++--------------+-----------------------------------------------+
+| arithmetic   | - ``$.foo + "_" + $.bar``                     |
+| (-+*/)       | - ``$.foo * 12``                              |
+|              | - ``$.objects[*].cow + $.objects[*].cat``     |
++--------------+-----------------------------------------------+
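(Editor's aside: a brief sketch of the extended parser in action, using only the tabulated syntax above; the data is made up for illustration.)

.. code:: python

    >>> from jsonpath_ng.ext import parse

    >>> data = {'objects': [{'cow': 2}, {'cow': 8}]}

    # `len` yields the length of the array
    >>> [m.value for m in parse('$.objects.`len`').find(data)]
    [2]

    # filter keeps only the entries whose ``cow`` field exceeds 5
    >>> [m.value for m in parse('$.objects[?cow > 5]').find(data)]
    [{'cow': 8}]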
+
+About arithmetic and string
+---------------------------
+
+Operations are done with Python operators on the types that Python allows, and
+return [] if the operation cannot be done due to incompatible types.
+
+When operators are used, a jsonpath must be fully defined, otherwise
+jsonpath-rw-ext can't know whether the expression is a string or a jsonpath
+field; in that case it will choose string as the type.
+
+Example with data::
+
+    {
+     'cow': 'foo',
+     'fish': 'bar'
+    }
+
+| ``cow + fish`` returns ``cowfish``
+| ``$.cow + $.fish`` returns ``foobar``
+| ``$.cow + "_" + $.fish`` returns ``foo_bar``
+| ``$.cow + "_" + fish`` returns ``foo_fish``
+
+About arithmetic and list
+-------------------------
+
+Arithmetic can be used against two lists if they have the same size.
+
+Example with data::
+
+    {'objects': [
+        {'cow': 2, 'cat': 3},
+        {'cow': 4, 'cat': 6}
+    ]}
+
+| ``$.objects[\*].cow + $.objects[\*].cat`` returns ``[6, 9]``
+
+More to explore
+---------------
+
+There are way too many JSONPath implementations out there to discuss.
+Some are robust, some are toy projects that still work fine, some are
+exercises. There will undoubtedly be many more. This one is made for use
+in released, maintained code, and in particular for programmatic access
+to the abstract syntax and extension. But JSONPath at its simplest just
+isn't that complicated, so you can probably use any of them
+successfully. Why not this one?
+
+The original proposal, as far as I know:
+
+- `JSONPath - XPath for
+  JSON `__ by Stefan Goessner.
+
+Other examples
+--------------
+
+Loading JSON data from a file
+
+.. code:: python
+
+    import json
+    d = json.loads('{"foo": [{"baz": 1}, {"baz": 2}]}')
+    # or
+    with open('myfile.json') as f:
+        d = json.load(f)
+
+Special note about PLY and docstrings
+-------------------------------------
+
+The main parsing toolkit underlying this library,
+`PLY `__, does not work with docstrings
+removed. For example, ``PYTHONOPTIMIZE=2`` and ``python -OO`` will both
+cause a failure.
+
+Contributors
+------------
+
+This package is authored and maintained by:
+
+- `Kenn Knowles `__
+  (`@kennknowles `__)
+- `Tomas Aparicio `
+
+with the help of patches submitted by `these contributors `__.
+
+Copyright and License
+---------------------
+
+Copyright 2013 - Kenneth Knowles
+
+Copyright 2017 - Tomas Aparicio
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may
+not use this file except in compliance with the License. You may obtain
+a copy of the License at
+
+::
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+.. _`JSONPath proposal`: http://goessner.net/articles/JsonPath/
+.. _`jsonpath-rw`: https://github.com/kennknowles/python-jsonpath-rw
+.. _`jsonpath-rw-ext`: https://pypi.python.org/pypi/jsonpath-rw-ext/
+
+.. |PyPi downloads| image:: https://pypip.in/d/jsonpath-ng/badge.png
+   :target: https://pypi.python.org/pypi/jsonpath-ng
+..
|Build Status| image:: https://github.com/h2non/jsonpath-ng/actions/workflows/ci.yml/badge.svg + :target: https://github.com/h2non/jsonpath-ng/actions/workflows/ci.yml +.. |PyPI| image:: https://img.shields.io/pypi/v/jsonpath-ng.svg?maxAge=2592000?style=flat-square + :target: https://pypi.python.org/pypi/jsonpath-ng diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/RECORD new file mode 100644 index 0000000..46ee473 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/RECORD @@ -0,0 +1,21 @@ +../../bin/jsonpath_ng,sha256=wyLTfFPfO_g7cOZuZ25Mq6trR-K89aSBqA5id8661_4,237 +jsonpath_ng-1.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +jsonpath_ng-1.6.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +jsonpath_ng-1.6.0.dist-info/METADATA,sha256=u0MUzDAHKwIvzlMX6f-Njo7FBdMYsAZtWBstfaCdoKw,17317 +jsonpath_ng-1.6.0.dist-info/RECORD,, +jsonpath_ng-1.6.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +jsonpath_ng-1.6.0.dist-info/entry_points.txt,sha256=VRPM3LkYSvQ6p1e61jOm5_NZHTHCZ2xv-2Sbh_qO_68,69 +jsonpath_ng-1.6.0.dist-info/top_level.txt,sha256=SeYdUWfJ4KSDQbd2GnE6BOd8vMG7Lta9nbIfT2N7xbE,12 +jsonpath_ng/__init__.py,sha256=vY-YLsf5LrsrvbrIWoP4BszVNiHeKSN7BbERlYGzoYM,116 +jsonpath_ng/bin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jsonpath_ng/bin/jsonpath.py,sha256=oqMf4yidMF9WoAC22ko3h8K8-jhkarP8wpp52DKmWmc,2151 +jsonpath_ng/exceptions.py,sha256=WbmwVjhCtpqp0enN3Sd4ymlZGP8ZZUkvT9uq7PXiEq4,146 +jsonpath_ng/ext/__init__.py,sha256=oxAHiz1-xcRsDX_KGDCiBh6LGP2zHZKzvI3QxrFTh6E,605 +jsonpath_ng/ext/arithmetic.py,sha256=CvRF0dnFWu7V1v2XrQBjymPJGrxYWIr4ff4efhQQOhE,2381 +jsonpath_ng/ext/filter.py,sha256=Gbzf-6pIS7H_WIdcB8VuQzm3i9py28KwjQdxCkTXux8,3907 +jsonpath_ng/ext/iterable.py,sha256=OIBuVDGbK4Igvd2rZUeiG3UoNIdf4oaLEELOSDNnYZY,2984 +jsonpath_ng/ext/parser.py,sha256=zeqaWrLDTr4kIP-RrPaBZtZqnf6t27fHfIVjwCiS0RU,5286 +jsonpath_ng/ext/string.py,sha256=IA3gAJg3ykJiX7-gLYoBSd0rTYkJd1jOfIzJ3mHNpyQ,3261 +jsonpath_ng/jsonpath.py,sha256=hMxBPOfr8gNA10gnEi-KfrDPkU5f6uhX0QzGtzSU5B8,26480 +jsonpath_ng/lexer.py,sha256=kax39qjPz4N3b1kqMnY9SdPYlcjDGVYjj0Rx8jJR1jk,5349 +jsonpath_ng/parser.py,sha256=FaAiHGfU4rnL9kJ8n0bGT79CFcF10KDPYkCJ-UuFxdM,5867 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/WHEEL new file mode 100644 index 0000000..1f37c02 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/entry_points.txt b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/entry_points.txt new file mode 100644 index 0000000..e1985ff --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +jsonpath_ng = jsonpath_ng.bin.jsonpath:entry_point diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/top_level.txt 
b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/top_level.txt new file mode 100644 index 0000000..30b75c5 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng-1.6.0.dist-info/top_level.txt @@ -0,0 +1 @@ +jsonpath_ng diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/__init__.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/__init__.py index b19fd11..e28943f 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/__init__.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/__init__.py @@ -3,4 +3,4 @@ # Current package version -__version__ = '1.5.3' +__version__ = '1.6.0' diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/ext/filter.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/ext/filter.py index d82f004..16210a4 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/ext/filter.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/ext/filter.py @@ -13,7 +13,6 @@ import operator import re -from six import moves from .. import JSONPath, DatumInContext, Index @@ -49,7 +48,7 @@ def find(self, datum): return [] return [DatumInContext(datum.value[i], path=Index(i), context=datum) - for i in moves.range(0, len(datum.value)) + for i in range(0, len(datum.value)) if (len(self.expressions) == len(list(filter(lambda x: x.find(datum.value[i]), self.expressions))))] diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/ext/string.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/ext/string.py index 80ed890..1cd2763 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/ext/string.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/ext/string.py @@ -15,9 +15,9 @@ from .. 
import DatumInContext, This
 
 
-SUB = re.compile("sub\(/(.*)/,\s+(.*)\)")
-SPLIT = re.compile("split\((.),\s+(\d+),\s+(\d+|-1)\)")
-STR = re.compile("str\(\)")
+SUB = re.compile(r"sub\(/(.*)/,\s+(.*)\)")
+SPLIT = re.compile(r"split\((.),\s+(\d+),\s+(\d+|-1)\)")
+STR = re.compile(r"str\(\)")
 
 
 class DefintionInvalid(Exception):
diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/jsonpath.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/jsonpath.py
index f4f9d4b..d506302 100644
--- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/jsonpath.py
+++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/jsonpath.py
@@ -1,9 +1,9 @@
-from __future__ import unicode_literals, print_function, absolute_import, division, generators, nested_scopes
+from __future__ import (absolute_import, division, generators, nested_scopes,
+                        print_function, unicode_literals)
+
 import logging
-import six
-from six.moves import xrange
 from itertools import *  # noqa
-from .exceptions import JSONPathError
+from jsonpath_ng.lexer import JsonPathLexer
 
 # Get logger name
 logger = logging.getLogger(__name__)
@@ -220,6 +220,9 @@ def __repr__(self):
     def __eq__(self, other):
         return isinstance(other, Root)
 
+    def __hash__(self):
+        return hash('$')
+
 
 class This(JSONPath):
     """
@@ -244,6 +247,9 @@ def __repr__(self):
     def __eq__(self, other):
         return isinstance(other, This)
 
+    def __hash__(self):
+        return hash('this')
+
 
 class Child(JSONPath):
     """
@@ -302,6 +308,9 @@ def __str__(self):
     def __repr__(self):
         return '%s(%r, %r)' % (self.__class__.__name__, self.left, self.right)
 
+    def __hash__(self):
+        return hash((self.left, self.right))
+
 
 class Parent(JSONPath):
     """
@@ -323,6 +332,9 @@ def __str__(self):
     def __repr__(self):
         return 'Parent()'
 
+    def __hash__(self):
+        return hash('parent')
+
 
 class Where(JSONPath):
     """
@@ -357,6 +369,9 @@ def __str__(self):
     def __eq__(self, other):
         return isinstance(other, Where) and other.left == self.left and other.right == self.right
 
+    def __hash__(self):
+        return hash((self.left, self.right))
+
 
 class Descendants(JSONPath):
     """
     JSONPath that matches first the left expression then any descendant
@@ -469,6 +484,9 @@ def __eq__(self, other):
     def __repr__(self):
         return '%s(%r, %r)' % (self.__class__.__name__, self.left, self.right)
 
+    def __hash__(self):
+        return hash((self.left, self.right))
+
 
 class Union(JSONPath):
     """
@@ -490,6 +508,12 @@ def is_singular(self):
     def find(self, data):
         return self.left.find(data) + self.right.find(data)
 
+    def __eq__(self, other):
+        return isinstance(other, Union) and self.left == other.left and self.right == other.right
+
+    def __hash__(self):
+        return hash((self.left, self.right))
+
 
 class Intersect(JSONPath):
     """
     JSONPath for bits that match *both* patterns.
@@ -511,6 +535,12 @@ def is_singular(self):
     def find(self, data):
         raise NotImplementedError()
 
+    def __eq__(self, other):
+        return isinstance(other, Intersect) and self.left == other.left and self.right == other.right
+
+    def __hash__(self):
+        return hash((self.left, self.right))
+
 
 class Fields(JSONPath):
     """
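(Editor's note: the ``__hash__`` methods added throughout this diff are required companions to the new ``__eq__`` methods. Python 3 sets ``__hash__`` to ``None`` on any class that defines ``__eq__`` without also defining ``__hash__``, so instances would otherwise be unhashable. A minimal standalone sketch of that rule, independent of the vendored code:)

.. code:: python

    class Eq:
        def __eq__(self, other):
            # Defining __eq__ alone disables default hashing in Python 3
            return isinstance(other, Eq)

    class EqHash(Eq):
        def __hash__(self):
            return hash('EqHash')  # restores hashability; equal objects hash alike

    # hash(Eq()) raises TypeError: unhashable type: 'Eq'
    print(hash(EqHash()))  # works: instances can live in sets and as dict keys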
@@ -588,7 +618,14 @@ def filter(self, fn, data):
         return data
 
     def __str__(self):
-        return ','.join(map(str, self.fields))
+        # If any JsonPathLexer.literals are included in the field name, quotes are needed
+        # This avoids unnecessary quotes to keep strings short.
+        # Test each field whether it contains a literal and only then add quotes
+        # The test loops over all literals; could possibly optimize to short-circuit once one is found
+        fields_as_str = ("'" + str(f) + "'" if any([l in f for l in JsonPathLexer.literals]) else
+                         str(f) for f in self.fields)
+        return ','.join(fields_as_str)
+
 
     def __repr__(self):
         return '%s(%s)' % (self.__class__.__name__, ','.join(map(repr, self.fields)))
@@ -596,6 +633,9 @@ def __repr__(self):
     def __eq__(self, other):
         return isinstance(other, Fields) and tuple(self.fields) == tuple(other.fields)
 
+    def __hash__(self):
+        return hash(tuple(self.fields))
+
 
 class Index(JSONPath):
     """
@@ -662,6 +702,9 @@ def _pad_value(self, value):
             pad = self.index - len(value) + 1
             value += [{} for __ in range(pad)]
 
+    def __hash__(self):
+        return hash(self.index)
+
 
 class Slice(JSONPath):
     """
@@ -700,13 +743,13 @@ def find(self, datum):
             return []
 
         # Here's the hack. If it is a dictionary or some kind of constant,
         # put it in a single-element list
-        if (isinstance(datum.value, dict) or isinstance(datum.value, six.integer_types) or isinstance(datum.value, six.string_types)):
+        if (isinstance(datum.value, dict) or isinstance(datum.value, int) or isinstance(datum.value, str)):
             return self.find(DatumInContext([datum.value], path=datum.path, context=datum.context))
 
         # Some iterators do not support slicing but we can still
         # at least work for '*'
-        if self.start == None and self.end == None and self.step == None:
-            return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in xrange(0, len(datum.value))]
+        if self.start is None and self.end is None and self.step is None:
+            return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in range(0, len(datum.value))]
         else:
             return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in range(0, len(datum.value))[self.start:self.end:self.step]]
@@ -728,7 +771,7 @@ def filter(self, fn, data):
         return data
 
     def __str__(self):
-        if self.start == None and self.end == None and self.step == None:
+        if self.start is None and self.end is None and self.step is None:
             return '[*]'
         else:
             return '[%s%s%s]' % (self.start or '',
@@ -741,6 +784,9 @@ def __repr__(self):
     def __eq__(self, other):
         return isinstance(other, Slice) and other.start == self.start and self.end == other.end and other.step == self.step
 
+    def __hash__(self):
+        return hash((self.start, self.end, self.step))
+
 
 def _create_list_key(dict_):
     """
diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/lexer.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/lexer.py
index 4014933..2fc09c1 100644
--- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/lexer.py
+++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/lexer.py
@@ -16,7 +16,7 @@ class JsonPathLexer(object):
 
     def __init__(self, debug=False):
         self.debug = debug
-        if self.__doc__ == None:
+        if self.__doc__ is None:
             raise JsonPathLexerError('Docstrings have been removed! By design of PLY, jsonpath-rw requires docstrings.
You must not use PYTHONOPTIMIZE=2 or python -OO.') def tokenize(self, string): @@ -31,7 +31,8 @@ def tokenize(self, string): while True: t = new_lexer.token() - if t is None: break + if t is None: + break t.col = t.lexpos - new_lexer.latest_newline yield t diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/parser.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/parser.py index 72333c2..7c311f0 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/parser.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_ng/parser.py @@ -5,6 +5,7 @@ generators, nested_scopes, ) +import logging import sys import os.path @@ -55,7 +56,8 @@ def parse_token_stream(self, token_iterator, start_symbol='jsonpath'): parsing_table_module = '_'.join([module_name, start_symbol, 'parsetab']) - # And we regenerate the parse table every time; it doesn't actually take that long! + # And we regenerate the parse table every time; + # it doesn't actually take that long! new_parser = ply.yacc.yacc(module=self, debug=self.debug, tabmodule = parsing_table_module, diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_rw-1.4.0.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_rw-1.4.0.dist-info/RECORD index 732ea70..035286e 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_rw-1.4.0.dist-info/RECORD +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_rw-1.4.0.dist-info/RECORD @@ -3,7 +3,7 @@ jsonpath_rw-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7z jsonpath_rw-1.4.0.dist-info/METADATA,sha256=YpILgtomSGnBnMkWAnoiCjCltr_Xg-NRUASouddqa5I,13258 jsonpath_rw-1.4.0.dist-info/RECORD,, jsonpath_rw-1.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jsonpath_rw-1.4.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +jsonpath_rw-1.4.0.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92 jsonpath_rw-1.4.0.dist-info/entry_points.txt,sha256=rhhrY2M1qcBqXZdcsE7b3WzZYR9K7jqG8JZ_BQBj3OA,70 jsonpath_rw-1.4.0.dist-info/top_level.txt,sha256=ZkYenrz7C0bQMlN3Nn0VYMWQc2Vm5zfcGQTOJowecEA,12 jsonpath_rw/__init__.py,sha256=ptcHrCa_lKTgP_OLUyEeiFSfolhQG5wsxzAckU7OZSY,73 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_rw-1.4.0.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_rw-1.4.0.dist-info/WHEEL index 1f37c02..7e68873 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_rw-1.4.0.dist-info/WHEEL +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/jsonpath_rw-1.4.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) +Generator: bdist_wheel (0.41.2) Root-Is-Purelib: true Tag: py3-none-any diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/munch-2.3.2.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/munch-2.3.2.dist-info/RECORD index 0f27efb..8956d25 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/munch-2.3.2.dist-info/RECORD +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/munch-2.3.2.dist-info/RECORD @@ -3,7 +3,7 @@ munch-2.3.2.dist-info/LICENSE.txt,sha256=V8qVySBZyDgGJRkkYpeb0ymUquP835Av9useRn7 munch-2.3.2.dist-info/METADATA,sha256=8f76jlBWV-he4FEQir_-TvfmKxfPNqm5bwIvbE6gims,865 munch-2.3.2.dist-info/RECORD,, 
munch-2.3.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -munch-2.3.2.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110 +munch-2.3.2.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110 munch-2.3.2.dist-info/top_level.txt,sha256=PRHN8MYaV54issXsc-3Sde-NdKBLL7BXsafd7Haw8IE,6 munch/__init__.py,sha256=UQvBwwtPbqAzNzIADInzc1fWpX38nFztUDboLQigu0o,16556 munch/python3_compat.py,sha256=fguSsh5lZOxfWI6r4hvima0N8dYn167fXqpRkqu5OEw,71 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/munch-2.3.2.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/munch-2.3.2.dist-info/WHEEL index f771c29..c34f116 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/munch-2.3.2.dist-info/WHEEL +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/munch-2.3.2.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) +Generator: bdist_wheel (0.41.2) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/INSTALLER b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/LICENSE b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/LICENSE new file mode 100644 index 0000000..67db858 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/METADATA b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/METADATA new file mode 100644 index 0000000..05779fa --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/METADATA @@ -0,0 +1,122 @@ +Metadata-Version: 2.1 +Name: requests +Version: 2.31.0 +Summary: Python HTTP for Humans. +Home-page: https://requests.readthedocs.io +Author: Kenneth Reitz +Author-email: me@kennethreitz.org +License: Apache 2.0 +Project-URL: Documentation, https://requests.readthedocs.io +Project-URL: Source, https://github.com/psf/requests +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: charset-normalizer (<4,>=2) +Requires-Dist: idna (<4,>=2.5) +Requires-Dist: urllib3 (<3,>=1.21.1) +Requires-Dist: certifi (>=2017.4.17) +Provides-Extra: security +Provides-Extra: socks +Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks' +Provides-Extra: use_chardet_on_py3 +Requires-Dist: chardet (<6,>=3.0.2) ; extra == 'use_chardet_on_py3' + +# Requests + +**Requests** is a simple, yet elegant, HTTP library. 
+ +```python +>>> import requests +>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass')) +>>> r.status_code +200 +>>> r.headers['content-type'] +'application/json; charset=utf8' +>>> r.encoding +'utf-8' +>>> r.text +'{"authenticated": true, ...' +>>> r.json() +{'authenticated': True, ...} +``` + +Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method! + +Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code. + +[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests) +[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests) +[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors) + +## Installing Requests and Supported Versions + +Requests is available on PyPI: + +```console +$ python -m pip install requests +``` + +Requests officially supports Python 3.7+. + +## Supported Features & Best–Practices + +Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today. + +- Keep-Alive & Connection Pooling +- International Domains and URLs +- Sessions with Cookie Persistence +- Browser-style TLS/SSL Verification +- Basic & Digest Authentication +- Familiar `dict`–like Cookies +- Automatic Content Decompression and Decoding +- Multi-part File Uploads +- SOCKS Proxy Support +- Connection Timeouts +- Streaming Downloads +- Automatic honoring of `.netrc` +- Chunked HTTP Requests + +## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io) + +[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io) + +## Cloning the repository + +When cloning the Requests repository, you may need to add the `-c +fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see +[this issue](https://github.com/psf/requests/issues/2690) for more background): + +```shell +git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git +``` + +You can also apply this setting to your global Git config: + +```shell +git config --global fetch.fsck.badTimezone ignore +``` + +--- + +[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf) + + diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/RECORD new file mode 100644 index 0000000..a32f5f4 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/RECORD @@ -0,0 +1,25 @@ +requests-2.31.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +requests-2.31.0.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142 +requests-2.31.0.dist-info/METADATA,sha256=eCPokOnbb0FROLrfl0R5EpDvdufsb9CaN4noJH__54I,4634 
+requests-2.31.0.dist-info/RECORD,, +requests-2.31.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +requests-2.31.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +requests-2.31.0.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9 +requests/__init__.py,sha256=LvmKhjIz8mHaKXthC2Mv5ykZ1d92voyf3oJpd-VuAig,4963 +requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435 +requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495 +requests/adapters.py,sha256=v_FmjU5KZ76k-YttShZYB5RprIzhhL8Y3zgW9p4eBQ8,19553 +requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449 +requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187 +requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429 +requests/compat.py,sha256=yxntVOSEHGMrn7FNr_32EEam1ZNAdPRdSE13_yaHzTk,1451 +requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560 +requests/exceptions.py,sha256=DhveFBclVjTRxhRduVpO-GbMYMID2gmjdLfNEqNpI_U,3811 +requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875 +requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733 +requests/models.py,sha256=-DlKi0or8gFAM6VzutobXvvBW_2wrJuOF5NfndTIddA,35223 +requests/packages.py,sha256=DXgv-FJIczZITmv0vEBAhWj4W-5CGCIN_ksvgR17Dvs,957 +requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373 +requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235 +requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912 +requests/utils.py,sha256=6sx2X3cIVA8BgWOg8odxFy-_lbWDFETU8HI4fU4Rmqw,33448 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/REQUESTED b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/WHEEL new file mode 100644 index 0000000..1f37c02 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/top_level.txt b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/top_level.txt new file mode 100644 index 0000000..f229360 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests-2.31.0.dist-info/top_level.txt @@ -0,0 +1 @@ +requests diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/__init__.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/__init__.py index 343e508..300a16c 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/__init__.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/__init__.py @@ -66,22 +66,22 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver # Check urllib3 for compatibility. 
major, minor, patch = urllib3_version # noqa: F811 major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.26 - assert major == 1 - assert minor >= 21 - assert minor <= 26 + # urllib3 >= 1.21.1 + assert major >= 1 + if major == 1: + assert minor >= 21 # Check charset_normalizer for compatibility. if chardet_version: major, minor, patch = chardet_version.split(".")[:3] major, minor, patch = int(major), int(minor), int(patch) - # chardet_version >= 3.0.2, < 5.0.0 - assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0) + # chardet_version >= 3.0.2, < 6.0.0 + assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) elif charset_normalizer_version: major, minor, patch = charset_normalizer_version.split(".")[:3] major, minor, patch = int(major), int(minor), int(patch) - # charset_normalizer >= 2.0.0 < 3.0.0 - assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0) + # charset_normalizer >= 2.0.0 < 4.0.0 + assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) else: raise Exception("You need either charset_normalizer or chardet installed") diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/__version__.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/__version__.py index cbb5c5c..5063c3f 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/__version__.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/__version__.py @@ -5,10 +5,10 @@ __title__ = "requests" __description__ = "Python HTTP for Humans." __url__ = "https://requests.readthedocs.io" -__version__ = "2.28.0" -__build__ = 0x022800 +__version__ = "2.31.0" +__build__ = 0x023100 __author__ = "Kenneth Reitz" __author_email__ = "me@kennethreitz.org" __license__ = "Apache 2.0" -__copyright__ = "Copyright 2022 Kenneth Reitz" +__copyright__ = "Copyright Kenneth Reitz" __cake__ = "\u2728 \U0001f370 \u2728" diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/_internal_utils.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/_internal_utils.py index 7dc9bc5..f2cf635 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/_internal_utils.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/_internal_utils.py @@ -14,9 +14,11 @@ _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") +_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) +_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) HEADER_VALIDATORS = { - bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE), - str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR), + bytes: _HEADER_VALIDATORS_BYTE, + str: _HEADER_VALIDATORS_STR, } diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/adapters.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/adapters.py index d3b2d5b..78e3bb6 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/adapters.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/adapters.py @@ -22,7 +22,6 @@ from urllib3.exceptions import ReadTimeoutError, ResponseError from urllib3.exceptions import SSLError as _SSLError from urllib3.poolmanager import PoolManager, proxy_from_url -from urllib3.response import HTTPResponse from urllib3.util import Timeout as TimeoutSauce from urllib3.util import parse_url from urllib3.util.retry import Retry @@ -194,7 +193,6 @@ def 
init_poolmanager( num_pools=connections, maxsize=maxsize, block=block, - strict=True, **pool_kwargs, ) @@ -485,63 +483,19 @@ def send( timeout = TimeoutSauce(connect=timeout, read=timeout) try: - if not chunked: - resp = conn.urlopen( - method=request.method, - url=url, - body=request.body, - headers=request.headers, - redirect=False, - assert_same_host=False, - preload_content=False, - decode_content=False, - retries=self.max_retries, - timeout=timeout, - ) - - # Send the request. - else: - if hasattr(conn, "proxy_pool"): - conn = conn.proxy_pool - - low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) - - try: - skip_host = "Host" in request.headers - low_conn.putrequest( - request.method, - url, - skip_accept_encoding=True, - skip_host=skip_host, - ) - - for header, value in request.headers.items(): - low_conn.putheader(header, value) - - low_conn.endheaders() - - for i in request.body: - low_conn.send(hex(len(i))[2:].encode("utf-8")) - low_conn.send(b"\r\n") - low_conn.send(i) - low_conn.send(b"\r\n") - low_conn.send(b"0\r\n\r\n") - - # Receive the response from the server - r = low_conn.getresponse() - - resp = HTTPResponse.from_httplib( - r, - pool=conn, - connection=low_conn, - preload_content=False, - decode_content=False, - ) - except Exception: - # If we hit any problems here, clean up the connection. - # Then, raise so that we can handle the actual exception. - low_conn.close() - raise + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout, + chunked=chunked, + ) except (ProtocolError, OSError) as err: raise ConnectionError(err, request=request) diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/api.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/api.py index 2f71aae..cd0b3ee 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/api.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/api.py @@ -106,7 +106,7 @@ def post(url, data=None, json=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response @@ -121,7 +121,7 @@ def put(url, data=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response @@ -136,7 +136,7 @@ def patch(url, data=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. 
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/models.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/models.py index 7e15228..617a413 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/models.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/models.py @@ -438,7 +438,7 @@ def prepare_url(self, url, params): if not scheme: raise MissingSchema( f"Invalid URL {url!r}: No scheme supplied. " - f"Perhaps you meant http://{url}?" + f"Perhaps you meant https://{url}?" ) if not host: @@ -813,8 +813,7 @@ def generate(): # Special case for urllib3. if hasattr(self.raw, "stream"): try: - for chunk in self.raw.stream(chunk_size, decode_content=True): - yield chunk + yield from self.raw.stream(chunk_size, decode_content=True) except ProtocolError as e: raise ChunkedEncodingError(e) except DecodeError as e: diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/sessions.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/sessions.py index 6cb3b4d..dbcf2a7 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/sessions.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/sessions.py @@ -324,7 +324,9 @@ def rebuild_proxies(self, prepared_request, proxies): except KeyError: username, password = None, None - if username and password: + # urllib3 handles proxy authorization for us in the standard adapter. + # Avoid appending this to TLS tunneled requests where it may be leaked. + if not scheme.startswith('https') and username and password: headers["Proxy-Authorization"] = _basic_auth_str(username, password) return new_proxies diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/utils.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/utils.py index ad53583..a367417 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/utils.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/requests/utils.py @@ -25,7 +25,12 @@ from .__version__ import __version__ # to_native_string is unused here, but imported here for backwards compatibility -from ._internal_utils import HEADER_VALIDATORS, to_native_string # noqa: F401 +from ._internal_utils import ( # noqa: F401 + _HEADER_VALIDATORS_BYTE, + _HEADER_VALIDATORS_STR, + HEADER_VALIDATORS, + to_native_string, +) from .compat import ( Mapping, basestring, @@ -1031,20 +1036,23 @@ def check_header_validity(header): :param header: tuple, in the format (name, value). 
""" name, value = header + _validate_header_part(header, name, 0) + _validate_header_part(header, value, 1) - for part in header: - if type(part) not in HEADER_VALIDATORS: - raise InvalidHeader( - f"Header part ({part!r}) from {{{name!r}: {value!r}}} must be " - f"of type str or bytes, not {type(part)}" - ) - - _validate_header_part(name, "name", HEADER_VALIDATORS[type(name)][0]) - _validate_header_part(value, "value", HEADER_VALIDATORS[type(value)][1]) +def _validate_header_part(header, header_part, header_validator_index): + if isinstance(header_part, str): + validator = _HEADER_VALIDATORS_STR[header_validator_index] + elif isinstance(header_part, bytes): + validator = _HEADER_VALIDATORS_BYTE[header_validator_index] + else: + raise InvalidHeader( + f"Header part ({header_part!r}) from {header} " + f"must be of type str or bytes, not {type(header_part)}" + ) -def _validate_header_part(header_part, header_kind, validator): if not validator.match(header_part): + header_kind = "name" if header_validator_index == 0 else "value" raise InvalidHeader( f"Invalid leading whitespace, reserved character(s), or return" f"character(s) in header {header_kind}: {header_part!r}" diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/splunk_sdk-1.6.18.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/splunk_sdk-1.6.18.dist-info/RECORD index ca2fc00..3fa02c2 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/splunk_sdk-1.6.18.dist-info/RECORD +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/splunk_sdk-1.6.18.dist-info/RECORD @@ -2,7 +2,7 @@ splunk_sdk-1.6.18.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7z splunk_sdk-1.6.18.dist-info/METADATA,sha256=5ED-1H2wwnFJCt_D8WCmTPPXsqka2En6ECHtGBV20x0,741 splunk_sdk-1.6.18.dist-info/RECORD,, splunk_sdk-1.6.18.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -splunk_sdk-1.6.18.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +splunk_sdk-1.6.18.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92 splunk_sdk-1.6.18.dist-info/top_level.txt,sha256=taq2YYfOB31tNefwW73s54xytiB_sy8bpbyBAfn0kr8,10 splunklib/__init__.py,sha256=1J8iSAeiM_2xN9uSvFnqTT9krVVB0-8gUZvxLGl8QbA,772 splunklib/binding.py,sha256=ui61YzW0zKpYjUrdaqyyHM_mhshvyRaH5_mGbKLY8zc,58384 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/splunk_sdk-1.6.18.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/splunk_sdk-1.6.18.dist-info/WHEEL index 1f37c02..7e68873 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/splunk_sdk-1.6.18.dist-info/WHEEL +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/splunk_sdk-1.6.18.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) +Generator: bdist_wheel (0.41.2) Root-Is-Purelib: true Tag: py3-none-any diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/INSTALLER b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/LICENSE b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/LICENSE new file mode 100644 index 0000000..f26bcf4 --- /dev/null +++ 
b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/LICENSE @@ -0,0 +1,279 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see https://opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. 
BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. 
CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/METADATA b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/METADATA new file mode 100644 index 0000000..70e1d63 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/METADATA @@ -0,0 +1,69 @@
+Metadata-Version: 2.1
+Name: typing_extensions
+Version: 4.7.1
+Summary: Backported and Experimental Type Hints for Python 3.7+
+Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
+Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee"
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Software Development
+Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
+Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
+Project-URL: Documentation, https://typing-extensions.readthedocs.io/
+Project-URL: Home, https://github.com/python/typing_extensions
+Project-URL: Q & A, https://github.com/python/typing/discussions
+Project-URL: Repository, https://github.com/python/typing_extensions
+
+# Typing Extensions
+
+[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)
+
+[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) –
+[PyPI](https://pypi.org/project/typing-extensions/)
+
+## Overview
+
+The `typing_extensions` module serves two related purposes:
+
+- Enable use of new type system features on older Python versions. For example,
+  `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
+  users on previous Python versions to use it too.
+- Enable experimentation with new type system PEPs before they are accepted and
+  added to the `typing` module.
+
+`typing_extensions` is treated specially by static type checkers such as
+mypy and pyright. Objects defined in `typing_extensions` are treated the same
+way as equivalent forms in `typing`.
+
+`typing_extensions` uses
+[Semantic Versioning](https://semver.org/). The
+major version will be incremented only for backwards-incompatible changes.
+Therefore, it's safe to depend
+on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,
+where `x.y` is the first version that includes all features you need.
+
+`typing_extensions` supports Python versions 3.7 and higher.
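Since this vendored copy is what the add-on's own Python code imports, a minimal sketch of what the backport provides may be useful. The names `SupportsClose`, `Connection`, and `is_str_list` below are illustrative, not part of the package:

```python
# Illustrative sketch: on any Python >= 3.7, the vendored typing_extensions
# provides TypeGuard (stdlib only in 3.10+) and runtime-checkable Protocols
# with the same meaning as their `typing` counterparts.
from typing import List

from typing_extensions import Protocol, TypeGuard, runtime_checkable


@runtime_checkable
class SupportsClose(Protocol):
    def close(self) -> None: ...


def is_str_list(values: List[object]) -> TypeGuard[List[str]]:
    # Type checkers narrow List[object] to List[str] when this returns True.
    return all(isinstance(v, str) for v in values)


class Connection:
    def close(self) -> None:
        pass


assert isinstance(Connection(), SupportsClose)  # structural isinstance check
assert is_str_list(["a", "b"])
```

Per the versioning policy above, a consumer pinning this vendored version would use a constraint along the lines of `typing_extensions >=4.7, <5`.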
+ +## Included items + +See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a +complete listing of module contents. + +## Contributing + +See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md) +for how to contribute to `typing_extensions`. + diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/RECORD new file mode 100644 index 0000000..6b9ff40 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/RECORD @@ -0,0 +1,6 @@ +typing_extensions-4.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +typing_extensions-4.7.1.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936 +typing_extensions-4.7.1.dist-info/METADATA,sha256=0W71u6mC24oVYJzibNoq2l-bQnVoU_p25uiNhAq5OcA,3078 +typing_extensions-4.7.1.dist-info/RECORD,, +typing_extensions-4.7.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +typing_extensions.py,sha256=zkLXjhMMSmKvLLqj-MCunbScGMu7kPLZYUsLun38I00,111082 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/WHEEL new file mode 100644 index 0000000..3b5e64b --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions-4.7.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions.py index 6ae0c34..901f3b9 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/typing_extensions.py @@ -9,7 +9,6 @@ import typing import warnings - __all__ = [ # Super-special typing primitives. 'Any', @@ -33,6 +32,7 @@ 'Coroutine', 'AsyncGenerator', 'AsyncContextManager', + 'Buffer', 'ChainMap', # Concrete collection types. @@ -45,7 +45,13 @@ 'TypedDict', # Structural checks, a.k.a. protocols. + 'SupportsAbs', + 'SupportsBytes', + 'SupportsComplex', + 'SupportsFloat', 'SupportsIndex', + 'SupportsInt', + 'SupportsRound', # One-off things. 'Annotated', @@ -58,8 +64,11 @@ 'final', 'get_args', 'get_origin', + 'get_original_bases', + 'get_protocol_members', 'get_type_hints', 'IntVar', + 'is_protocol', 'is_typeddict', 'Literal', 'NewType', @@ -71,12 +80,52 @@ 'runtime_checkable', 'Text', 'TypeAlias', + 'TypeAliasType', 'TypeGuard', 'TYPE_CHECKING', 'Never', 'NoReturn', 'Required', 'NotRequired', + + # Pure aliases, have always been in typing + 'AbstractSet', + 'AnyStr', + 'BinaryIO', + 'Callable', + 'Collection', + 'Container', + 'Dict', + 'ForwardRef', + 'FrozenSet', + 'Generator', + 'Generic', + 'Hashable', + 'IO', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'List', + 'Mapping', + 'MappingView', + 'Match', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Optional', + 'Pattern', + 'Reversible', + 'Sequence', + 'Set', + 'Sized', + 'TextIO', + 'Tuple', + 'Union', + 'ValuesView', + 'cast', + 'no_type_check', + 'no_type_check_decorator', ] # for backward compatibility @@ -86,7 +135,13 @@ # The functions below are modified copies of typing internal helpers. 
# They are needed by _ProtocolMeta and they provide support for PEP 646. -_marker = object() + +class _Sentinel: + def __repr__(self): + return "" + + +_marker = _Sentinel() def _check_generic(cls, parameters, elen=_marker): @@ -187,17 +242,19 @@ def __new__(cls, *args, **kwargs): ClassVar = typing.ClassVar + +class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + # On older versions of typing there is an internal class named "Final". # 3.8+ if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): Final = typing.Final # 3.7 else: - class _FinalForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - + class _FinalForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') @@ -260,21 +317,67 @@ def IntVar(name): return typing.TypeVar(name) -# 3.8+: -if hasattr(typing, 'Literal'): +# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 +if sys.version_info >= (3, 10, 1): Literal = typing.Literal -# 3.7: else: - class _LiteralForm(typing._SpecialForm, _root=True): + def _flatten_literal_params(parameters): + """An internal helper for Literal creation: flatten Literals among parameters""" + params = [] + for p in parameters: + if isinstance(p, _LiteralGenericAlias): + params.extend(p.__args__) + else: + params.append(p) + return tuple(params) - def __repr__(self): - return 'typing_extensions.' + self._name + def _value_and_type_iter(params): + for p in params: + yield p, type(p) + + class _LiteralGenericAlias(typing._GenericAlias, _root=True): + def __eq__(self, other): + if not isinstance(other, _LiteralGenericAlias): + return NotImplemented + these_args_deduped = set(_value_and_type_iter(self.__args__)) + other_args_deduped = set(_value_and_type_iter(other.__args__)) + return these_args_deduped == other_args_deduped + + def __hash__(self): + return hash(frozenset(_value_and_type_iter(self.__args__))) + + class _LiteralForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, doc: str): + self._name = 'Literal' + self._doc = self.__doc__ = doc def __getitem__(self, parameters): - return typing._GenericAlias(self, parameters) + if not isinstance(parameters, tuple): + parameters = (parameters,) + + parameters = _flatten_literal_params(parameters) - Literal = _LiteralForm('Literal', - doc="""A type that can be used to indicate to type checkers + val_type_pairs = list(_value_and_type_iter(parameters)) + try: + deduped_pairs = set(val_type_pairs) + except TypeError: + # unhashable parameters + pass + else: + # similar logic to typing._deduplicate on Python 3.9+ + if len(deduped_pairs) < len(val_type_pairs): + new_parameters = [] + for pair in val_type_pairs: + if pair in deduped_pairs: + new_parameters.append(pair[0]) + deduped_pairs.remove(pair) + assert not deduped_pairs, deduped_pairs + parameters = tuple(new_parameters) + + return _LiteralGenericAlias(self, parameters) + + Literal = _LiteralForm(doc="""\ + A type that can be used to indicate to type checkers that the corresponding value has a value literally equivalent to the provided parameter. 
For example: @@ -288,7 +391,7 @@ def __getitem__(self, parameters): instead of a type.""") -_overload_dummy = typing._overload_dummy # noqa +_overload_dummy = typing._overload_dummy if hasattr(typing, "get_overloads"): # 3.11+ @@ -383,40 +486,55 @@ def clear_overloads(): Counter = typing.Counter ChainMap = typing.ChainMap AsyncGenerator = typing.AsyncGenerator -NewType = typing.NewType Text = typing.Text TYPE_CHECKING = typing.TYPE_CHECKING -_PROTO_WHITELIST = ['Callable', 'Awaitable', - 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', - 'ContextManager', 'AsyncContextManager'] +_PROTO_ALLOWLIST = { + 'collections.abc': [ + 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', + ], + 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], + 'typing_extensions': ['Buffer'], +} + + +_EXCLUDED_ATTRS = { + "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol", + "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__", + "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__", + "__subclasshook__", "__orig_class__", "__init__", "__new__", + "__protocol_attrs__", "__callable_proto_members_only__", +} + +if sys.version_info < (3, 8): + _EXCLUDED_ATTRS |= { + "_gorg", "__next_in_mro__", "__extra__", "__tree_hash__", "__args__", + "__origin__" + } + +if sys.version_info >= (3, 9): + _EXCLUDED_ATTRS.add("__class_getitem__") + +if sys.version_info >= (3, 12): + _EXCLUDED_ATTRS.add("__type_params__") + +_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS) def _get_protocol_attrs(cls): attrs = set() for base in cls.__mro__[:-1]: # without object - if base.__name__ in ('Protocol', 'Generic'): + if base.__name__ in {'Protocol', 'Generic'}: continue annotations = getattr(base, '__annotations__', {}) - for attr in list(base.__dict__.keys()) + list(annotations.keys()): - if (not attr.startswith('_abc_') and attr not in ( - '__abstractmethods__', '__annotations__', '__weakref__', - '_is_protocol', '_is_runtime_protocol', '__dict__', - '__args__', '__slots__', - '__next_in_mro__', '__parameters__', '__origin__', - '__orig_bases__', '__extra__', '__tree_hash__', - '__doc__', '__subclasshook__', '__init__', '__new__', - '__module__', '_MutableMapping__marker', '_gorg')): + for attr in (*base.__dict__, *annotations): + if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): attrs.add(attr) return attrs -def _is_callable_members_only(cls): - return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) - - def _maybe_adjust_parameters(cls): """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__. @@ -426,7 +544,7 @@ def _maybe_adjust_parameters(cls): """ tvars = [] if '__orig_bases__' in cls.__dict__: - tvars = typing._collect_type_vars(cls.__orig_bases__) + tvars = _collect_type_vars(cls.__orig_bases__) # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. # If found, tvars must be a subset of it. # If not found, tvars is it. 
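A few hunks up, the backported `Literal` reimplements the stdlib semantics fixed in 3.11.0, 3.10.1, and 3.9.8: parameters are flattened, deduplicated by value and type, and compared order-insensitively. A short sanity check of those semantics, assuming the vendored module is importable (on 3.10.1+ it simply re-exports `typing.Literal`, which behaves the same way):

```python
from typing_extensions import Literal

# Duplicates collapse, order is irrelevant, and nested Literals flatten...
assert Literal[1, 2, 1] == Literal[1, 2]
assert Literal[1, 2] == Literal[2, 1]
assert Literal[Literal[1], Literal[2]] == Literal[1, 2]

# ...but deduplication keys on (value, type), so 0 and False stay distinct.
assert Literal[0] != Literal[False]
```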
@@ -457,168 +575,284 @@ def _maybe_adjust_parameters(cls): cls.__parameters__ = tuple(tvars) -# 3.8+ -if hasattr(typing, 'Protocol'): +def _caller(depth=2): + try: + return sys._getframe(depth).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + +# The performance of runtime-checkable protocols is significantly improved on Python 3.12, +# so we backport the 3.12 version of Protocol to Python <=3.11 +if sys.version_info >= (3, 12): Protocol = typing.Protocol -# 3.7 else: + def _allow_reckless_class_checks(depth=3): + """Allow instance and class checks for special stdlib modules. + The abc and functools modules indiscriminately call isinstance() and + issubclass() on the whole MRO of a user class, which may contain protocols. + """ + return _caller(depth) in {'abc', 'functools', None} def _no_init(self, *args, **kwargs): if type(self)._is_protocol: raise TypeError('Protocols cannot be instantiated') - class _ProtocolMeta(abc.ABCMeta): # noqa: B024 - # This metaclass is a bit unfortunate and exists only because of the lack - # of __instancehook__. + if sys.version_info >= (3, 8): + # Inheriting from typing._ProtocolMeta isn't actually desirable, + # but is necessary to allow typing.Protocol and typing_extensions.Protocol + # to mix without getting TypeErrors about "metaclass conflict" + _typing_Protocol = typing.Protocol + _ProtocolMetaBase = type(_typing_Protocol) + else: + _typing_Protocol = _marker + _ProtocolMetaBase = abc.ABCMeta + + class _ProtocolMeta(_ProtocolMetaBase): + # This metaclass is somewhat unfortunate, + # but is necessary for several reasons... + # + # NOTE: DO NOT call super() in any methods in this class + # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 + # and those are slow + def __new__(mcls, name, bases, namespace, **kwargs): + if name == "Protocol" and len(bases) < 2: + pass + elif {Protocol, _typing_Protocol} & set(bases): + for base in bases: + if not ( + base in {object, typing.Generic, Protocol, _typing_Protocol} + or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) + or is_protocol(base) + ): + raise TypeError( + f"Protocols can only inherit from other protocols, " + f"got {base!r}" + ) + return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) + + def __init__(cls, *args, **kwargs): + abc.ABCMeta.__init__(cls, *args, **kwargs) + if getattr(cls, "_is_protocol", False): + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + cls.__callable_proto_members_only__ = all( + callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ + ) + + def __subclasscheck__(cls, other): + if cls is Protocol: + return type.__subclasscheck__(cls, other) + if ( + getattr(cls, '_is_protocol', False) + and not _allow_reckless_class_checks() + ): + if not isinstance(other, type): + # Same error message as for issubclass(1, int). 
+ raise TypeError('issubclass() arg 1 must be a class') + if ( + not cls.__callable_proto_members_only__ + and cls.__dict__.get("__subclasshook__") is _proto_hook + ): + raise TypeError( + "Protocols with non-method members don't support issubclass()" + ) + if not getattr(cls, '_is_runtime_protocol', False): + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) + return abc.ABCMeta.__subclasscheck__(cls, other) + def __instancecheck__(cls, instance): # We need this method for situations where attributes are # assigned in __init__. - if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and - issubclass(instance.__class__, cls)): + if cls is Protocol: + return type.__instancecheck__(cls, instance) + if not getattr(cls, "_is_protocol", False): + # i.e., it's a concrete subclass of a protocol + return abc.ABCMeta.__instancecheck__(cls, instance) + + if ( + not getattr(cls, '_is_runtime_protocol', False) and + not _allow_reckless_class_checks() + ): + raise TypeError("Instance and class checks can only be used with" + " @runtime_checkable protocols") + + if abc.ABCMeta.__instancecheck__(cls, instance): return True - if cls._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): - return True - return super().__instancecheck__(instance) - - class Protocol(metaclass=_ProtocolMeta): - # There is quite a lot of overlapping code with typing.Generic. - # Unfortunately it is hard to avoid this while these live in two different - # modules. The duplicated code will be removed when Protocol is moved to typing. - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: + for attr in cls.__protocol_attrs__: + try: + val = inspect.getattr_static(instance, attr) + except AttributeError: + break + if val is None and callable(getattr(cls, attr, None)): + break + else: + return True - class C: - def meth(self) -> int: - return 0 + return False - def func(x: Proto) -> int: - return x.meth() + def __eq__(cls, other): + # Hack so that typing.Generic.__class_getitem__ + # treats typing_extensions.Protocol + # as equivalent to typing.Protocol on Python 3.8+ + if abc.ABCMeta.__eq__(cls, other) is True: + return True + return ( + cls is Protocol and other is getattr(typing, "Protocol", object()) + ) - func(C()) # Passes static type check + # This has to be defined, or the abc-module cache + # complains about classes with this metaclass being unhashable, + # if we define only __eq__! + def __hash__(cls) -> int: + return type.__hash__(cls) + + @classmethod + def _proto_hook(cls, other): + if not cls.__dict__.get('_is_protocol', False): + return NotImplemented + + for attr in cls.__protocol_attrs__: + for base in other.__mro__: + # Check if the members appears in the class dictionary... + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + + # ...or in annotations, if it is a sub-protocol. + annotations = getattr(base, '__annotations__', {}) + if ( + isinstance(annotations, collections.abc.Mapping) + and attr in annotations + and is_protocol(other) + ): + break + else: + return NotImplemented + return True - See PEP 544 for details. 
Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. + if sys.version_info >= (3, 8): + class Protocol(typing.Generic, metaclass=_ProtocolMeta): + __doc__ = typing.Protocol.__doc__ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False - Protocol classes can be generic, they are defined as:: + def __init_subclass__(cls, *args, **kwargs): + super().__init_subclass__(*args, **kwargs) - class GenProto(Protocol[T]): - def meth(self) -> T: - ... - """ - __slots__ = () - _is_protocol = True + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', False): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) - def __new__(cls, *args, **kwds): - if cls is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can only be used as a base class") - return super().__new__(cls) + # Set (or override) the protocol subclass hook. + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook - @typing._tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple): - params = (params,) - if not params and cls is not typing.Tuple: - raise TypeError( - f"Parameter list to {cls.__qualname__}[...] cannot be empty") - msg = "Parameters to generic types must be types." - params = tuple(typing._type_check(p, msg) for p in params) # noqa - if cls is Protocol: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, typing.TypeVar) for p in params): - i = 0 - while isinstance(params[i], typing.TypeVar): - i += 1 - raise TypeError( - "Parameters to Protocol[...] must all be type variables." - f" Parameter {i + 1} is {params[i]}") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Protocol[...] must all be unique") - else: - # Subscripting a regular Generic subclass. - _check_generic(cls, params, len(cls.__parameters__)) - return typing._GenericAlias(cls, params) + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init - def __init_subclass__(cls, *args, **kwargs): - if '__orig_bases__' in cls.__dict__: - error = typing.Generic in cls.__orig_bases__ - else: - error = typing.Generic in cls.__bases__ - if error: - raise TypeError("Cannot inherit from plain Generic") - _maybe_adjust_parameters(cls) + else: + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this on Python <3.8, + # as the typing module on Python 3.7 doesn't let us subclass typing.Generic! + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... + + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime_checkable act + as simple-minded runtime-checkable protocols that check + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... 
+ """ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: + raise TypeError( + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) + if cls is Protocol: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." + f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params, len(cls.__parameters__)) + return typing._GenericAlias(cls, params) - # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol for b in cls.__bases__) + def __init_subclass__(cls, *args, **kwargs): + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + _maybe_adjust_parameters(cls) - # Set (or override) the protocol subclass hook. - def _proto_hook(other): + # Determine if this is a protocol or a concrete subclass. if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook + cls._is_protocol = any(b is Protocol for b in cls.__bases__) - # We have nothing more to do for non-protocols. - if not cls._is_protocol: - return + # Set (or override) the protocol subclass hook. + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook - # Check consistency of bases. 
- for base in cls.__bases__: - if not (base in (object, typing.Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, _ProtocolMeta) and base._is_protocol): - raise TypeError('Protocols can only inherit from other' - f' protocols, got {repr(base)}') - cls.__init__ = _no_init + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init -# 3.8+ -if hasattr(typing, 'runtime_checkable'): +if sys.version_info >= (3, 8): runtime_checkable = typing.runtime_checkable -# 3.7 else: def runtime_checkable(cls): """Mark a protocol class as a runtime protocol, so that it @@ -628,7 +862,10 @@ def runtime_checkable(cls): This allows a simple-minded structural check very similar to the one-offs in collections.abc such as Hashable. """ - if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: + if not ( + (isinstance(cls, _ProtocolMeta) or issubclass(cls, typing.Generic)) + and getattr(cls, "_is_protocol", False) + ): raise TypeError('@runtime_checkable can be only applied to protocol classes,' f' got {cls!r}') cls._is_runtime_protocol = True @@ -639,11 +876,52 @@ def runtime_checkable(cls): runtime = runtime_checkable -# 3.8+ -if hasattr(typing, 'SupportsIndex'): +# Our version of runtime-checkable protocols is faster on Python 3.7-3.11 +if sys.version_info >= (3, 12): + SupportsInt = typing.SupportsInt + SupportsFloat = typing.SupportsFloat + SupportsComplex = typing.SupportsComplex + SupportsBytes = typing.SupportsBytes SupportsIndex = typing.SupportsIndex -# 3.7 + SupportsAbs = typing.SupportsAbs + SupportsRound = typing.SupportsRound else: + @runtime_checkable + class SupportsInt(Protocol): + """An ABC with one abstract method __int__.""" + __slots__ = () + + @abc.abstractmethod + def __int__(self) -> int: + pass + + @runtime_checkable + class SupportsFloat(Protocol): + """An ABC with one abstract method __float__.""" + __slots__ = () + + @abc.abstractmethod + def __float__(self) -> float: + pass + + @runtime_checkable + class SupportsComplex(Protocol): + """An ABC with one abstract method __complex__.""" + __slots__ = () + + @abc.abstractmethod + def __complex__(self) -> complex: + pass + + @runtime_checkable + class SupportsBytes(Protocol): + """An ABC with one abstract method __bytes__.""" + __slots__ = () + + @abc.abstractmethod + def __bytes__(self) -> bytes: + pass + @runtime_checkable class SupportsIndex(Protocol): __slots__ = () @@ -652,8 +930,45 @@ class SupportsIndex(Protocol): def __index__(self) -> int: pass + @runtime_checkable + class SupportsAbs(Protocol[T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. + """ + __slots__ = () -if hasattr(typing, "Required"): + @abc.abstractmethod + def __abs__(self) -> T_co: + pass + + @runtime_checkable + class SupportsRound(Protocol[T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. 
+ """ + __slots__ = () + + @abc.abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +def _ensure_subclassable(mro_entries): + def inner(func): + if sys.implementation.name == "pypy" and sys.version_info < (3, 9): + cls_dict = { + "__call__": staticmethod(func), + "__mro_entries__": staticmethod(mro_entries) + } + t = type(func.__name__, (), cls_dict) + return functools.update_wrapper(t(), func) + else: + func.__mro_entries__ = mro_entries + return func + return inner + + +if sys.version_info >= (3, 13): # The standard library TypedDict in Python 3.8 does not store runtime information # about which (if any) keys are optional. See https://bugs.python.org/issue38834 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" @@ -661,108 +976,63 @@ def __index__(self) -> int: # The standard library TypedDict below Python 3.11 does not store runtime # information about optional and required keys when using Required or NotRequired. # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + # Aaaand on 3.12 we add __orig_bases__ to TypedDict + # to enable better runtime introspection. + # On 3.13 we deprecate some odd ways of creating TypedDicts. TypedDict = typing.TypedDict _TypedDictMeta = typing._TypedDictMeta is_typeddict = typing.is_typeddict else: - def _check_fails(cls, other): - try: - if sys._getframe(1).f_globals['__name__'] not in ['abc', - 'functools', - 'typing']: - # Typed dicts are only for static structural subtyping. - raise TypeError('TypedDict does not support instance and class checks') - except (AttributeError, ValueError): - pass - return False - - def _dict_new(*args, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - return dict(*args, **kwargs) - - _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' - - def _typeddict_new(*args, total=True, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - if args: - typename, args = args[0], args[1:] # allow the "_typename" keyword be passed - elif '_typename' in kwargs: - typename = kwargs.pop('_typename') - import warnings - warnings.warn("Passing '_typename' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - raise TypeError("TypedDict.__new__() missing 1 required positional " - "argument: '_typename'") - if args: - try: - fields, = args # allow the "_fields" keyword be passed - except ValueError: - raise TypeError('TypedDict.__new__() takes from 2 to 3 ' - f'positional arguments but {len(args) + 2} ' - 'were given') - elif '_fields' in kwargs and len(kwargs) == 1: - fields = kwargs.pop('_fields') - import warnings - warnings.warn("Passing '_fields' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - fields = None - - if fields is None: - fields = kwargs - elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments," - " but not both") + # 3.10.0 and later + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters - ns = {'__annotations__': dict(fields)} - try: - # Setting correct module is necessary to make typed dict classes pickleable. 
- ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass + if sys.version_info >= (3, 8): + _fake_name = "Protocol" + else: + _fake_name = "_Protocol" - return _TypedDictMeta(typename, (), ns, total=total) + class _TypedDictMeta(type): + def __new__(cls, name, bases, ns, total=True): + """Create new typed dict class object. - _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' - ' /, *, total=True, **kwargs)') + This method is called when TypedDict is subclassed, + or when TypedDict is instantiated. This way + TypedDict supports all three syntax forms described in its docstring. + Subclasses and instances of TypedDict return actual dictionaries. + """ + for base in bases: + if type(base) is not _TypedDictMeta and base is not typing.Generic: + raise TypeError('cannot inherit from both a TypedDict type ' + 'and a non-TypedDict base class') - _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + if any(issubclass(b, typing.Generic) for b in bases): + generic_base = (typing.Generic,) + else: + generic_base = () - class _TypedDictMeta(type): - def __init__(cls, name, bases, ns, total=True): - super().__init__(name, bases, ns) + # typing.py generally doesn't let you inherit from plain Generic, unless + # the name of the class happens to be "Protocol" (or "_Protocol" on 3.7). + tp_dict = type.__new__(_TypedDictMeta, _fake_name, (*generic_base, dict), ns) + tp_dict.__name__ = name + if tp_dict.__qualname__ == _fake_name: + tp_dict.__qualname__ = name - def __new__(cls, name, bases, ns, total=True): - # Create new typed dict class object. - # This method is called directly when TypedDict is subclassed, - # or via _typeddict_new when TypedDict is instantiated. This way - # TypedDict supports all three syntaxes described in its docstring. - # Subclasses and instances of TypedDict return actual dictionaries - # via _dict_new. - ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new - # Don't insert typing.Generic into __bases__ here, - # or Generic.__init_subclass__ will raise TypeError - # in the super().__new__() call. - # Instead, monkey-patch __bases__ onto the class after it's been created. - tp_dict = super().__new__(cls, name, (dict,), ns) - - if any(issubclass(base, typing.Generic) for base in bases): - tp_dict.__bases__ = (typing.Generic, dict) - _maybe_adjust_parameters(tp_dict) + if not hasattr(tp_dict, '__orig_bases__'): + tp_dict.__orig_bases__ = bases annotations = {} own_annotations = ns.get('__annotations__', {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" - kwds = {"module": tp_dict.__module__} if _TAKES_MODULE else {} - own_annotations = { - n: typing._type_check(tp, msg, **kwds) - for n, tp in own_annotations.items() - } + if _TAKES_MODULE: + own_annotations = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in own_annotations.items() + } + else: + own_annotations = { + n: typing._type_check(tp, msg) + for n, tp in own_annotations.items() + } required_keys = set() optional_keys = set() @@ -796,17 +1066,25 @@ def __new__(cls, name, bases, ns, total=True): tp_dict.__total__ = total return tp_dict - __instancecheck__ = __subclasscheck__ = _check_fails + __call__ = dict # static method - TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) - TypedDict.__module__ = __name__ - TypedDict.__doc__ = \ - """A simple typed name space. At runtime it is equivalent to a plain dict. 
+ def __subclasscheck__(cls, other): + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') - TypedDict creates a dictionary type that expects all of its - instances to have a certain set of keys, with each key + __instancecheck__ = __subclasscheck__ + + _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) + + @_ensure_subclassable(lambda bases: (_TypedDict,)) + def TypedDict(__typename, __fields=_marker, *, total=True, **kwargs): + """A simple typed namespace. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type such that a type checker will expect all + instances to have a certain set of keys, where each key is associated with a value of a consistent type. This expectation - is not checked at runtime but is only enforced by type checkers. + is not checked at runtime. + Usage:: class Point2D(TypedDict): @@ -821,14 +1099,66 @@ class Point2D(TypedDict): The type info can be accessed via the Point2D.__annotations__ dict, and the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. - TypedDict supports two additional equivalent forms:: + TypedDict supports an additional equivalent form:: - Point2D = TypedDict('Point2D', x=int, y=int, label=str) Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) - The class syntax is only supported in Python 3.6+, while two other - syntax forms work for Python 2.7 and 3.2+ + By default, all keys must be present in a TypedDict. It is possible + to override this by specifying totality:: + + class Point2D(TypedDict, total=False): + x: int + y: int + + This means that a Point2D TypedDict can have any of the keys omitted. A type + checker is only expected to support a literal False or True as the value of + the total argument. True is the default, and makes all items defined in the + class body be required. + + The Required and NotRequired special forms can also be used to mark + individual keys as being required or not required:: + + class Point2D(TypedDict): + x: int # the "x" key must always be present (Required is the default) + y: NotRequired[int] # the "y" key can be omitted + + See PEP 655 for more details on Required and NotRequired. """ + if __fields is _marker or __fields is None: + if __fields is _marker: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + + example = f"`{__typename} = TypedDict({__typename!r}, {{}})`" + deprecation_msg = ( + f"{deprecated_thing} is deprecated and will be disallowed in " + "Python 3.15. To create a TypedDict class with 0 fields " + "using the functional syntax, pass an empty dictionary, e.g. " + ) + example + "." + warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + __fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + if kwargs: + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated " + "in Python 3.11, will be removed in Python 3.13, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + + ns = {'__annotations__': dict(__fields)} + module = _caller() + if module is not None: + # Setting correct module is necessary to make typed dict classes pickleable. 
+ ns['__module__'] = module + + td = _TypedDictMeta(__typename, (), ns, total=total) + td.__orig_bases__ = (TypedDict,) + return td if hasattr(typing, "_TypedDictMeta"): _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) @@ -846,7 +1176,10 @@ class Film(TypedDict): is_typeddict(Film) # => True is_typeddict(Union[list, str]) # => False """ - return isinstance(tp, tuple(_TYPEDDICT_TYPES)) + # On 3.8, this would otherwise return True + if hasattr(typing, "TypedDict") and tp is typing.TypedDict: + return False + return isinstance(tp, _TYPEDDICT_TYPES) if hasattr(typing, "assert_type"): @@ -872,9 +1205,6 @@ def greet(name: str) -> None: if hasattr(typing, "Required"): get_type_hints = typing.get_type_hints else: - import functools - import types - # replaces _strip_annotations() def _strip_extras(t): """Strips Annotated, Required and NotRequired from a given type.""" @@ -887,12 +1217,12 @@ def _strip_extras(t): if stripped_args == t.__args__: return t return t.copy_with(stripped_args) - if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): + if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): stripped_args = tuple(_strip_extras(a) for a in t.__args__) if stripped_args == t.__args__: return t - return types.GenericAlias(t.__origin__, stripped_args) - if hasattr(types, "UnionType") and isinstance(t, types.UnionType): + return _types.GenericAlias(t.__origin__, stripped_args) + if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): stripped_args = tuple(_strip_extras(a) for a in t.__args__) if stripped_args == t.__args__: return t @@ -1119,11 +1449,7 @@ def get_args(tp): TypeAlias = typing.TypeAlias # 3.9 elif sys.version_info[:2] >= (3, 9): - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeAliasForm + @_ExtensionsSpecialForm def TypeAlias(self, parameters): """Special marker indicating that an assignment should be recognized as a proper type alias definition by type @@ -1138,59 +1464,77 @@ def TypeAlias(self, parameters): raise TypeError(f"{self} is not subscriptable") # 3.7-3.8 else: - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name + TypeAlias = _ExtensionsSpecialForm( + 'TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example + above.""" + ) - TypeAlias = _TypeAliasForm('TypeAlias', - doc="""Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. 
- For example:: +def _set_default(type_param, default): + if isinstance(default, (tuple, list)): + type_param.__default__ = tuple((typing._type_check(d, "Default must be a type") + for d in default)) + elif default != _marker: + type_param.__default__ = typing._type_check(default, "Default must be a type") + else: + type_param.__default__ = None - Predicate: TypeAlias = Callable[..., bool] - It's invalid when used anywhere except as in the example - above.""") +def _set_module(typevarlike): + # for pickling: + def_mod = _caller(depth=3) + if def_mod != 'typing_extensions': + typevarlike.__module__ = def_mod class _DefaultMixin: """Mixin for TypeVarLike defaults.""" __slots__ = () + __init__ = _set_default - def __init__(self, default): - if isinstance(default, (tuple, list)): - self.__default__ = tuple((typing._type_check(d, "Default must be a type") - for d in default)) - elif default != _marker: - self.__default__ = typing._type_check(default, "Default must be a type") - else: - self.__default__ = None + +# Classes using this metaclass must provide a _backported_typevarlike ClassVar +class _TypeVarLikeMeta(type): + def __instancecheck__(cls, __instance: Any) -> bool: + return isinstance(__instance, cls._backported_typevarlike) # Add default and infer_variance parameters from PEP 696 and 695 -class TypeVar(typing.TypeVar, _DefaultMixin, _root=True): +class TypeVar(metaclass=_TypeVarLikeMeta): """Type variable.""" - __module__ = 'typing' + _backported_typevarlike = typing.TypeVar - def __init__(self, name, *constraints, bound=None, - covariant=False, contravariant=False, - default=_marker, infer_variance=False): - super().__init__(name, *constraints, bound=bound, covariant=covariant, - contravariant=contravariant) - _DefaultMixin.__init__(self, default) - self.__infer_variance__ = infer_variance + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=_marker, infer_variance=False): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant, + infer_variance=infer_variance) + else: + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + if infer_variance and (covariant or contravariant): + raise ValueError("Variance cannot be specified with infer_variance.") + typevar.__infer_variance__ = infer_variance + _set_default(typevar, default) + _set_module(typevar) + return typevar - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") # Python 3.10+ has PEP 612 @@ -1258,25 +1602,33 @@ def __eq__(self, other): # 3.10+ if hasattr(typing, 'ParamSpec'): - # Add default Parameter - PEP 696 - class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True): - """Parameter specification variable.""" + # Add default parameter - PEP 696 + class ParamSpec(metaclass=_TypeVarLikeMeta): + """Parameter specification.""" + + _backported_typevarlike = typing.ParamSpec + + def __new__(cls, name, *, bound=None, + covariant=False, contravariant=False, + infer_variance=False, default=_marker): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to 
typing.ParamSpec + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant, + infer_variance=infer_variance) + else: + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant) + paramspec.__infer_variance__ = infer_variance - __module__ = 'typing' + _set_default(paramspec, default) + _set_module(paramspec) + return paramspec - def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - default=_marker): - super().__init__(name, bound=bound, covariant=covariant, - contravariant=contravariant) - _DefaultMixin.__init__(self, default) - - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") # 3.7-3.9 else: @@ -1341,11 +1693,12 @@ def kwargs(self): return ParamSpecKwargs(self) def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - default=_marker): + infer_variance=False, default=_marker): super().__init__([self]) self.__name__ = name self.__covariant__ = bool(covariant) self.__contravariant__ = bool(contravariant) + self.__infer_variance__ = bool(infer_variance) if bound: self.__bound__ = typing._type_check(bound, 'Bound must be a type.') else: @@ -1353,15 +1706,14 @@ def __init__(self, name, *, bound=None, covariant=False, contravariant=False, _DefaultMixin.__init__(self, default) # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None + def_mod = _caller() if def_mod != 'typing_extensions': self.__module__ = def_mod def __repr__(self): - if self.__covariant__: + if self.__infer_variance__: + prefix = '' + elif self.__covariant__: prefix = '+' elif self.__contravariant__: prefix = '-' @@ -1436,10 +1788,10 @@ def _concatenate_getitem(self, parameters): # 3.10+ if hasattr(typing, 'Concatenate'): Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811 # 3.9 elif sys.version_info[:2] >= (3, 9): - @_TypeAliasForm + @_ExtensionsSpecialForm def Concatenate(self, parameters): """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a higher order function which adds, removes or transforms parameters of a @@ -1454,10 +1806,7 @@ def Concatenate(self, parameters): return _concatenate_getitem(self, parameters) # 3.7-3.8 else: - class _ConcatenateForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - + class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): return _concatenate_getitem(self, parameters) @@ -1479,11 +1828,7 @@ def __getitem__(self, parameters): TypeGuard = typing.TypeGuard # 3.9 elif sys.version_info[:2] >= (3, 9): - class _TypeGuardForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeGuardForm + @_ExtensionsSpecialForm def TypeGuard(self, parameters): """Special typing form used to annotate the return type of a user-defined type guard function. ``TypeGuard`` only accepts a single type argument.
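Context for the ``TypeGuard`` hunks above: a minimal usage sketch of the backported form (illustrative only, not part of the vendored file; the ``is_str_list``/``upper_all`` names are hypothetical)::

    from typing import List, Union
    from typing_extensions import TypeGuard

    def is_str_list(val: List[Union[str, int]]) -> TypeGuard[List[str]]:
        # At runtime this is an ordinary function returning a bool;
        # a static type checker additionally narrows ``val`` to
        # List[str] wherever the result is True.
        return all(isinstance(x, str) for x in val)

    def upper_all(val: List[Union[str, int]]) -> List[str]:
        if is_str_list(val):
            return [x.upper() for x in val]  # checker sees List[str] here
        return [str(x).upper() for x in val]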
@@ -1531,11 +1876,7 @@ def is_str(val: Union[str, float]): return typing._GenericAlias(self, (item,)) # 3.7-3.8 else: - class _TypeGuardForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - + class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type') @@ -1709,10 +2050,6 @@ def int_or_str(arg: int | str) -> None: Required = typing.Required NotRequired = typing.NotRequired elif sys.version_info[:2] >= (3, 9): - class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - @_ExtensionsSpecialForm def Required(self, parameters): """A special typing construct to mark a key of a total=False TypedDict @@ -1751,10 +2088,7 @@ class Movie(TypedDict): return typing._GenericAlias(self, (item,)) else: - class _RequiredForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - + class _RequiredForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') @@ -1793,28 +2127,65 @@ class Movie(TypedDict): """) -if hasattr(typing, "Unpack"): # 3.11+ +_UNPACK_DOC = """\ +Type unpack operator. + +The type unpack operator takes the child types from some container type, +such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For +example: + + # For some generic class `Foo`: + Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] + + Ts = TypeVarTuple('Ts') + # Specifies that `Bar` is generic in an arbitrary number of types. + # (Think of `Ts` as a tuple of an arbitrary number of individual + # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the + # `Generic[]`.) + class Bar(Generic[Unpack[Ts]]): ... + Bar[int] # Valid + Bar[int, str] # Also valid + +From Python 3.11, this can also be done using the `*` operator: + + Foo[*tuple[int, str]] + class Bar(Generic[*Ts]): ... + +The operator can also be used along with a `TypedDict` to annotate +`**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + +Note that there is only some runtime checking of this operator. Not +everything the runtime allows may be accepted by static type checkers. + +For more information, see PEP 646 and PEP 692. +""" + + +if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] Unpack = typing.Unpack + + def _is_unpack(obj): + return get_origin(obj) is Unpack + elif sys.version_info[:2] >= (3, 9): - class _UnpackSpecialForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name + class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, getitem): + super().__init__(getitem) + self.__doc__ = _UNPACK_DOC class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar @_UnpackSpecialForm def Unpack(self, parameters): - """A special typing construct to unpack a variadic type. For example: - - Shape = TypeVarTuple('Shape') - Batch = NewType('Batch', int) - - def add_batch_axis( - x: Array[Unpack[Shape]] - ) -> Array[Batch, Unpack[Shape]]: ... 
- - """ item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return _UnpackAlias(self, (item,)) @@ -1825,27 +2196,13 @@ def _is_unpack(obj): class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar - class _UnpackForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - + class _UnpackForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return _UnpackAlias(self, (item,)) - Unpack = _UnpackForm( - 'Unpack', - doc="""A special typing construct to unpack a variadic type. For example: - - Shape = TypeVarTuple('Shape') - Batch = NewType('Batch', int) - - def add_batch_axis( - x: Array[Unpack[Shape]] - ) -> Array[Batch, Unpack[Shape]]: ... - - """) + Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC) def _is_unpack(obj): return isinstance(obj, _UnpackAlias) @@ -1853,21 +2210,20 @@ def _is_unpack(obj): if hasattr(typing, "TypeVarTuple"): # 3.11+ - # Add default Parameter - PEP 696 - class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True): + # Add default parameter - PEP 696 + class TypeVarTuple(metaclass=_TypeVarLikeMeta): """Type variable tuple.""" - def __init__(self, name, *, default=_marker): - super().__init__(name) - _DefaultMixin.__init__(self, default) + _backported_typevarlike = typing.TypeVarTuple - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __new__(cls, name, *, default=_marker): + tvt = typing.TypeVarTuple(name) + _set_default(tvt, default) + _set_module(tvt) + return tvt + + def __init_subclass__(self, *args, **kwds): + raise TypeError("Cannot subclass special typing classes") else: class TypeVarTuple(_DefaultMixin): @@ -1925,10 +2281,7 @@ def __init__(self, name, *, default=_marker): _DefaultMixin.__init__(self, default) # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None + def_mod = _caller() if def_mod != 'typing_extensions': self.__module__ = def_mod @@ -2163,7 +2516,15 @@ def g(x: str) -> int: ... When this decorator is applied to an object, the type checker will generate a diagnostic on usage of the deprecated object. - No runtime warning is issued. The decorator sets the ``__deprecated__`` + The warning specified by ``category`` will be emitted on use + of deprecated objects. For functions, that happens on calls; + for classes, on instantiation. If the ``category`` is ``None``, + no warning is emitted. The ``stacklevel`` determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + + The decorator sets the ``__deprecated__`` attribute on the decorated object to the deprecation message passed to the decorator. If applied to an overload, the decorator must be after the ``@overload`` decorator for the attribute to @@ -2183,11 +2544,11 @@ def decorator(__arg: _T) -> _T: @functools.wraps(original_new) def __new__(cls, *args, **kwargs): warnings.warn(__msg, category=category, stacklevel=stacklevel + 1) - # Mirrors a similar check in object.__new__. 
- if not has_init and (args or kwargs): - raise TypeError(f"{cls.__name__}() takes no arguments") if original_new is not object.__new__: return original_new(cls, *args, **kwargs) + # Mirrors a similar check in object.__new__. + elif not has_init and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") else: return original_new(cls) @@ -2223,18 +2584,14 @@ def wrapper(*args, **kwargs): typing._check_generic = _check_generic -# Backport typing.NamedTuple as it exists in Python 3.11. +# Backport typing.NamedTuple as it exists in Python 3.12. # In 3.11, the ability to define generic `NamedTuple`s was supported. # This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. -if sys.version_info >= (3, 11): +# On 3.12, we added __orig_bases__ to call-based NamedTuples +# On 3.13, we deprecated kwargs-based NamedTuples +if sys.version_info >= (3, 13): NamedTuple = typing.NamedTuple else: - def _caller(): - try: - return sys._getframe(2).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): # For platforms without _getframe() - return None - def _make_nmtuple(name, types, module, defaults=()): fields = [n for n, t in types] annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") @@ -2276,8 +2633,11 @@ def __new__(cls, typename, bases, ns): ) nm_tpl.__bases__ = bases if typing.Generic in bases: - class_getitem = typing.Generic.__class_getitem__.__func__ - nm_tpl.__class_getitem__ = classmethod(class_getitem) + if hasattr(typing, '_generic_class_getitem'): # 3.12+ + nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) + else: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) # update from user namespace without overriding special namedtuple attributes for key in ns: if key in _prohibited_namedtuple_fields: @@ -2288,25 +2648,425 @@ def __new__(cls, typename, bases, ns): nm_tpl.__init_subclass__() return nm_tpl - def NamedTuple(__typename, __fields=None, **kwargs): - if __fields is None: - __fields = kwargs.items() + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + @_ensure_subclassable(_namedtuple_mro_entries) + def NamedTuple(__typename, __fields=_marker, **kwargs): + """Typed version of namedtuple. + + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) + An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + if __fields is _marker: + if kwargs: + deprecated_thing = "Creating NamedTuple classes using keyword arguments" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "Use the class-based or functional syntax instead." + ) + else: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + example = f"`{__typename} = NamedTuple({__typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." 
+ elif __fields is None: + if kwargs: + raise TypeError( + "Cannot pass `None` as the 'fields' parameter " + "and also specify fields using keyword arguments" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + example = f"`{__typename} = NamedTuple({__typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." elif kwargs: raise TypeError("Either list of fields or keywords" " can be provided to NamedTuple, not both") - return _make_nmtuple(__typename, __fields, module=_caller()) - - NamedTuple.__doc__ = typing.NamedTuple.__doc__ - _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + if __fields is _marker or __fields is None: + warnings.warn( + deprecation_msg.format(name=deprecated_thing, remove="3.15"), + DeprecationWarning, + stacklevel=2, + ) + __fields = kwargs.items() + nt = _make_nmtuple(__typename, __fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt # On 3.8+, alter the signature so that it matches typing.NamedTuple. # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, # so just leave the signature as it is on 3.7. if sys.version_info >= (3, 8): - NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)' + _new_signature = '(typename, fields=None, /, **kwargs)' + if isinstance(NamedTuple, _types.FunctionType): + NamedTuple.__text_signature__ = _new_signature + else: + NamedTuple.__call__.__text_signature__ = _new_signature - def _namedtuple_mro_entries(bases): - assert NamedTuple in bases - return (_NamedTuple,) - NamedTuple.__mro_entries__ = _namedtuple_mro_entries +if hasattr(collections.abc, "Buffer"): + Buffer = collections.abc.Buffer +else: + class Buffer(abc.ABC): + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there are no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + + """ + + # As a courtesy, register the most common stdlib buffer classes. + Buffer.register(memoryview) + Buffer.register(bytearray) + Buffer.register(bytes) + + +# Backport of types.get_original_bases, available on 3.12+ in CPython +if hasattr(_types, "get_original_bases"): + get_original_bases = _types.get_original_bases +else: + def get_original_bases(__cls): + """Return the class's "original" bases prior to modification by `__mro_entries__`. + + Examples:: + + from typing import TypeVar, Generic + from typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ...
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return __cls.__orig_bases__ + except AttributeError: + try: + return __cls.__bases__ + except AttributeError: + raise TypeError( + f'Expected an instance of type, not {type(__cls).__name__!r}' + ) from None + + +# NewType is a class on Python 3.10+, making it pickleable +# The error message for subclassing instances of NewType was improved on 3.11+ +if sys.version_info >= (3, 11): + NewType = typing.NewType +else: + class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ + + def __call__(self, obj): + return obj + + def __init__(self, name, tp): + self.__qualname__ = name + if '.' in name: + name = name.rpartition('.')[-1] + self.__name__ = name + self.__supertype__ = tp + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __mro_entries__(self, bases): + # We defined __mro_entries__ to get a better error message + # if a user attempts to subclass a NewType instance. bpo-46170 + supercls_name = self.__name__ + + class Dummy: + def __init_subclass__(cls): + subcls_name = cls.__name__ + raise TypeError( + f"Cannot subclass an instance of NewType. " + f"Perhaps you were looking for: " + f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" + ) + + return (Dummy,) + + def __repr__(self): + return f'{self.__module__}.{self.__qualname__}' + + def __reduce__(self): + return self.__qualname__ + + if sys.version_info >= (3, 10): + # PEP 604 methods + # It doesn't make sense to have these methods on Python <3.10 + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + +if hasattr(typing, "TypeAliasType"): + TypeAliasType = typing.TypeAliasType +else: + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance(obj, ( + type, + _types.GenericAlias, + _types.UnionType, + TypeAliasType, + )) + + class TypeAliasType: + """Create named, parameterized type aliases. + + This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + + is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + + The name ListOrSet can then be used as an alias for the type it refers to. + + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. + + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: + + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. 
(For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). + + """ + + def __init__(self, name: str, value, *, type_params=()): + if not isinstance(name, str): + raise TypeError("TypeAliasType name must be a string") + self.__value__ = value + self.__type_params__ = type_params + + parameters = [] + for type_param in type_params: + if isinstance(type_param, TypeVarTuple): + parameters.extend(type_param) + else: + parameters.append(type_param) + self.__parameters__ = tuple(parameters) + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + # Setting this attribute closes the TypeAliasType from further modification + self.__name__ = name + + def __setattr__(self, __name: str, __value: object) -> None: + if hasattr(self, "__name__"): + self._raise_attribute_error(__name) + super().__setattr__(__name, __value) + + def __delattr__(self, __name: str) -> Never: + self._raise_attribute_error(__name) + + def _raise_attribute_error(self, name: str) -> Never: + # Match the Python 3.12 error messages exactly + if name == "__name__": + raise AttributeError("readonly attribute") + elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: + raise AttributeError( + f"attribute '{name}' of 'typing.TypeAliasType' objects " + "is not writable" + ) + else: + raise AttributeError( + f"'typing.TypeAliasType' object has no attribute '{name}'" + ) + + def __repr__(self) -> str: + return self.__name__ + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + parameters = [ + typing._type_check( + item, f'Subscripting {self.__name__} requires a type.' + ) + for item in parameters + ] + return typing._GenericAlias(self, tuple(parameters)) + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + "type 'typing_extensions.TypeAliasType' is not an acceptable base type" + ) + + # The presence of this method convinces typing._type_check + # that TypeAliasTypes are types. + def __call__(self): + raise TypeError("Type alias is not callable") + + if sys.version_info >= (3, 10): + def __or__(self, right): + # For forward compatibility with 3.12, reject Unions + # that are not accepted by the built-in Union. + if not _is_unionable(right): + return NotImplemented + return typing.Union[self, right] + + def __ror__(self, left): + if not _is_unionable(left): + return NotImplemented + return typing.Union[left, self] + + +if hasattr(typing, "is_protocol"): + is_protocol = typing.is_protocol + get_protocol_members = typing.get_protocol_members +else: + def is_protocol(__tp: type) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + return ( + isinstance(__tp, type) + and getattr(__tp, '_is_protocol', False) + and __tp is not Protocol + and __tp is not getattr(typing, "Protocol", object()) + ) + + def get_protocol_members(__tp: type) -> typing.FrozenSet[str]: + """Return the set of members defined in a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. 
+ """ + if not is_protocol(__tp): + raise TypeError(f'{__tp!r} is not a Protocol') + if hasattr(__tp, '__protocol_attrs__'): + return frozenset(__tp.__protocol_attrs__) + return frozenset(_get_protocol_attrs(__tp)) + + +# Aliases for items that have always been in typing. +# Explicitly assign these (rather than using `from typing import *` at the top), +# so that we get a CI error if one of these is deleted from typing.py +# in a future version of Python +AbstractSet = typing.AbstractSet +AnyStr = typing.AnyStr +BinaryIO = typing.BinaryIO +Callable = typing.Callable +Collection = typing.Collection +Container = typing.Container +Dict = typing.Dict +ForwardRef = typing.ForwardRef +FrozenSet = typing.FrozenSet +Generator = typing.Generator +Generic = typing.Generic +Hashable = typing.Hashable +IO = typing.IO +ItemsView = typing.ItemsView +Iterable = typing.Iterable +Iterator = typing.Iterator +KeysView = typing.KeysView +List = typing.List +Mapping = typing.Mapping +MappingView = typing.MappingView +Match = typing.Match +MutableMapping = typing.MutableMapping +MutableSequence = typing.MutableSequence +MutableSet = typing.MutableSet +Optional = typing.Optional +Pattern = typing.Pattern +Reversible = typing.Reversible +Sequence = typing.Sequence +Set = typing.Set +Sized = typing.Sized +TextIO = typing.TextIO +Tuple = typing.Tuple +Union = typing.Union +ValuesView = typing.ValuesView +cast = typing.cast +no_type_check = typing.no_type_check +no_type_check_decorator = typing.no_type_check_decorator diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/INSTALLER b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/LICENSE.txt b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/LICENSE.txt new file mode 100644 index 0000000..429a176 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/METADATA b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/METADATA new file mode 100644 index 0000000..cd2440a --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/METADATA @@ -0,0 +1,1492 @@ +Metadata-Version: 2.1 +Name: urllib3 +Version: 1.26.18 +Summary: HTTP library with thread-safe connection pooling, file post, and more. +Home-page: https://urllib3.readthedocs.io/ +Author: Andrey Petrov +Author-email: andrey.petrov@shazow.net +License: MIT +Project-URL: Documentation, https://urllib3.readthedocs.io/ +Project-URL: Code, https://github.com/urllib3/urllib3 +Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues +Keywords: urllib httplib threadsafe filepost http https ssl pooling +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.* +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +Provides-Extra: brotli +Requires-Dist: brotlicffi >=0.8.0 ; ((os_name != "nt" or python_version >= "3") and platform_python_implementation != "CPython") and extra == 'brotli' +Requires-Dist: brotli ==1.0.9 ; (os_name != "nt" and python_version < "3" and platform_python_implementation == "CPython") and extra == 'brotli' +Requires-Dist: brotlipy >=0.6.0 ; (os_name == "nt" and python_version < "3") and extra == 'brotli' +Requires-Dist: brotli >=1.0.9 ; (python_version >= "3" and platform_python_implementation == "CPython") and extra == 'brotli' +Provides-Extra: secure +Requires-Dist: pyOpenSSL >=0.14 ; extra == 'secure' +Requires-Dist: cryptography >=1.3.4 ; extra == 'secure' +Requires-Dist: idna >=2.0.0 ; extra == 'secure' +Requires-Dist: certifi ; extra == 'secure' +Requires-Dist: urllib3-secure-extra ; extra == 'secure' +Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure' +Provides-Extra: socks +Requires-Dist: PySocks !=1.5.7,<2.0,>=1.5.6 ; extra == 'socks' + + +urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the +Python ecosystem already uses urllib3 and you should too. +urllib3 brings many critical features that are missing from the Python +standard libraries: + +- Thread safety. +- Connection pooling. +- Client-side SSL/TLS verification. +- File uploads with multipart encoding. +- Helpers for retrying requests and dealing with HTTP redirects. +- Support for gzip, deflate, and brotli encoding. +- Proxy support for HTTP and SOCKS. +- 100% test coverage. + +urllib3 is powerful and easy to use: + +.. 
code-block:: python + + >>> import urllib3 + >>> http = urllib3.PoolManager() + >>> r = http.request('GET', 'http://httpbin.org/robots.txt') + >>> r.status + 200 + >>> r.data + 'User-agent: *\nDisallow: /deny\n' + + +Installing +---------- + +urllib3 can be installed with `pip `_:: + + $ python -m pip install urllib3 + +Alternatively, you can grab the latest source code from `GitHub `_:: + + $ git clone https://github.com/urllib3/urllib3.git + $ cd urllib3 + $ git checkout 1.26.x + $ pip install . + + +Documentation +------------- + +urllib3 has usage and reference documentation at `urllib3.readthedocs.io `_. + + +Contributing +------------ + +urllib3 happily accepts contributions. Please see our +`contributing documentation `_ +for some tips on getting started. + + +Security Disclosures +-------------------- + +To report a security vulnerability, please use the +`Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure with maintainers. + + +Maintainers +----------- + +- `@sethmlarson `__ (Seth M. Larson) +- `@pquentin `__ (Quentin Pradet) +- `@theacodes `__ (Thea Flowers) +- `@haikuginger `__ (Jess Shapiro) +- `@lukasa `__ (Cory Benfield) +- `@sigmavirus24 `__ (Ian Stapleton Cordasco) +- `@shazow `__ (Andrey Petrov) + +👋 + + +Sponsorship +----------- + +If your company benefits from this library, please consider `sponsoring its +development `_. + + +For Enterprise +-------------- + +.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png + :width: 75 + :alt: Tidelift + +.. list-table:: + :widths: 10 100 + + * - |tideliftlogo| + - Professional support for urllib3 is available as part of the `Tidelift + Subscription`_. Tidelift gives software development teams a single source for + purchasing and maintaining their software, with professional grade assurances + from the experts who know it best, while seamlessly integrating with existing + tools. + +.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme + + +Changes +======= + +1.26.18 (2023-10-17) +-------------------- + +* Made body stripped from HTTP requests changing the request method to GET after HTTP 303 "See Other" redirect responses. + + +1.26.17 (2023-10-02) +-------------------- + +* Added the ``Cookie`` header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via ``Retry.remove_headers_on_redirect``. + + +1.26.16 (2023-05-23) +-------------------- + +* Fixed thread-safety issue where accessing a ``PoolManager`` with many distinct origins + would cause connection pools to be closed while requests are in progress (`#2954 `_) + + +1.26.15 (2023-03-10) +-------------------- + +* Fix socket timeout value when ``HTTPConnection`` is reused (`#2645 `__) +* Remove "!" character from the unreserved characters in IPv6 Zone ID parsing + (`#2899 `__) +* Fix IDNA handling of '\x80' byte (`#2901 `__) + +1.26.14 (2023-01-11) +-------------------- + +* Fixed parsing of port 0 (zero) returning None, instead of 0. (`#2850 `__) +* Removed deprecated getheaders() calls in contrib module. + +1.26.13 (2022-11-23) +-------------------- + +* Deprecated the ``HTTPResponse.getheaders()`` and ``HTTPResponse.getheader()`` methods. +* Fixed an issue where parsing a URL with leading zeroes in the port would be rejected + even when the port number after removing the zeroes was valid. 
+* Fixed a deprecation warning when using cryptography v39.0.0. +* Removed the ``<4`` in the ``Requires-Python`` packaging metadata field. + + +1.26.12 (2022-08-22) +-------------------- + +* Deprecated the `urllib3[secure]` extra and the `urllib3.contrib.pyopenssl` module. + Both will be removed in v2.x. See this `GitHub issue `_ + for justification and info on how to migrate. + + +1.26.11 (2022-07-25) +-------------------- + +* Fixed an issue where reading more than 2 GiB in a call to ``HTTPResponse.read`` would + raise an ``OverflowError`` on Python 3.9 and earlier. + + +1.26.10 (2022-07-07) +-------------------- + +* Removed support for Python 3.5 +* Fixed an issue where a ``ProxyError`` recommending configuring the proxy as HTTP + instead of HTTPS could appear even when an HTTPS proxy wasn't configured. + + +1.26.9 (2022-03-16) +------------------- + +* Changed ``urllib3[brotli]`` extra to favor installing Brotli libraries that are still + receiving updates like ``brotli`` and ``brotlicffi`` instead of ``brotlipy``. + This change does not impact behavior of urllib3, only which dependencies are installed. +* Fixed a socket leaking when ``HTTPSConnection.connect()`` raises an exception. +* Fixed ``server_hostname`` being forwarded from ``PoolManager`` to ``HTTPConnectionPool`` + when requesting an HTTP URL. Should only be forwarded when requesting an HTTPS URL. + + +1.26.8 (2022-01-07) +------------------- + +* Added extra message to ``urllib3.exceptions.ProxyError`` when urllib3 detects that + a proxy is configured to use HTTPS but the proxy itself appears to only use HTTP. +* Added a mention of the size of the connection pool when discarding a connection due to the pool being full. +* Added explicit support for Python 3.11. +* Deprecated the ``Retry.MAX_BACKOFF`` class property in favor of ``Retry.DEFAULT_MAX_BACKOFF`` + to better match the rest of the default parameter names. ``Retry.MAX_BACKOFF`` is removed in v2.0. +* Changed location of the vendored ``ssl.match_hostname`` function from ``urllib3.packages.ssl_match_hostname`` + to ``urllib3.util.ssl_match_hostname`` to ensure Python 3.10+ compatibility after being repackaged + by downstream distributors. +* Fixed absolute imports, all imports are now relative. + + +1.26.7 (2021-09-22) +------------------- + +* Fixed a bug with HTTPS hostname verification involving IP addresses and lack + of SNI. (Issue #2400) +* Fixed a bug where IPv6 braces weren't stripped during certificate hostname + matching. (Issue #2240) + + +1.26.6 (2021-06-25) +------------------- + +* Deprecated the ``urllib3.contrib.ntlmpool`` module. urllib3 is not able to support + it properly due to `reasons listed in this issue `_. + If you are a user of this module please leave a comment. +* Changed ``HTTPConnection.request_chunked()`` to not erroneously emit multiple + ``Transfer-Encoding`` headers in the case that one is already specified. +* Fixed typo in deprecation message to recommend ``Retry.DEFAULT_ALLOWED_METHODS``. + + +1.26.5 (2021-05-26) +------------------- + +* Fixed deprecation warnings emitted in Python 3.10. +* Updated vendored ``six`` library to 1.16.0. +* Improved performance of URL parser when splitting + the authority component. + + +1.26.4 (2021-03-15) +------------------- + +* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy + during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``. 
+ + +1.26.3 (2021-01-26) +------------------- + +* Fixed bytes and string comparison issue with headers (Pull #2141) + +* Changed ``ProxySchemeUnknown`` error message to be + more actionable if the user supplies a proxy URL without + a scheme. (Pull #2107) + + +1.26.2 (2020-11-12) +------------------- + +* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't + be imported properly on Python 2.7.8 and earlier (Pull #2052) + + +1.26.1 (2020-11-11) +------------------- + +* Fixed an issue where two ``User-Agent`` headers would be sent if a + ``User-Agent`` header key is passed as ``bytes`` (Pull #2047) + + +1.26.0 (2020-11-10) +------------------- + +* **NOTE: urllib3 v2.0 will drop support for Python 2**. + `Read more in the v2.0 Roadmap `_. + +* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806) + +* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that + still wish to use TLS earlier than 1.2 without a deprecation warning + should opt-in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002) + **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail** + +* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST`` + and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``, + ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)`` + (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed** + +* Added default ``User-Agent`` header to every request (Pull #1750) + +* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``, + and ``Host`` headers from being automatically emitted with requests (Pull #2018) + +* Collapse ``transfer-encoding: chunked`` request data and framing into + the same ``socket.send()`` call (Pull #1906) + +* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894) + +* Properly terminate SecureTransport connections when CA verification fails (Pull #1977) + +* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None`` + to SecureTransport (Pull #1903) + +* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970) + +* Suppress ``BrokenPipeError`` when writing request body after the server + has closed the socket (Pull #1524) + +* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC") + into an ``urllib3.exceptions.SSLError`` (Pull #1939) + + +1.25.11 (2020-10-19) +-------------------- + +* Fix retry backoff time parsed from ``Retry-After`` header when given + in the HTTP date format. The HTTP date was parsed as the local timezone + rather than accounting for the timezone in the HTTP date (typically + UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949) + +* Fix issue where an error would be raised when the ``SSLKEYLOGFILE`` + environment variable was set to the empty string. 
Now ``SSLContext.keylog_file`` + is not set in this situation (Pull #2016) + + +1.25.10 (2020-07-22) +-------------------- + +* Added support for ``SSLKEYLOGFILE`` environment variable for + logging TLS session keys with use with programs like + Wireshark for decrypting captured web traffic (Pull #1867) + +* Fixed loading of SecureTransport libraries on macOS Big Sur + due to the new dynamic linker cache (Pull #1905) + +* Collapse chunked request bodies data and framing into one + call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906) + +* Don't insert ``None`` into ``ConnectionPool`` if the pool + was empty when requesting a connection (Pull #1866) + +* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858) + + +1.25.9 (2020-04-16) +------------------- + +* Added ``InvalidProxyConfigurationWarning`` which is raised when + erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently + support connecting to HTTPS proxies but will soon be able to + and we would like users to migrate properly without much breakage. + + See `this GitHub issue `_ + for more information on how to fix your proxy config. (Pull #1851) + +* Drain connection after ``PoolManager`` redirect (Pull #1817) + +* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812) + +* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805) + +* Allow the CA certificate data to be passed as a string (Pull #1804) + +* Raise ``ValueError`` if method contains control characters (Pull #1800) + +* Add ``__repr__`` to ``Timeout`` (Pull #1795) + + +1.25.8 (2020-01-20) +------------------- + +* Drop support for EOL Python 3.4 (Pull #1774) + +* Optimize _encode_invalid_chars (Pull #1787) + + +1.25.7 (2019-11-11) +------------------- + +* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734) + +* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470) + +* Fix issue where URL fragment was sent within the request target. (Pull #1732) + +* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732) + +* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703) + + +1.25.6 (2019-09-24) +------------------- + +* Fix issue where tilde (``~``) characters were incorrectly + percent-encoded in the path. (Pull #1692) + + +1.25.5 (2019-09-19) +------------------- + +* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which + caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``. + (Issue #1682) + + +1.25.4 (2019-09-19) +------------------- + +* Propagate Retry-After header settings to subsequent retries. (Pull #1607) + +* Fix edge case where Retry-After header was still respected even when + explicitly opted out of. (Pull #1607) + +* Remove dependency on ``rfc3986`` for URL parsing. + +* Fix issue where URLs containing invalid characters within ``Url.auth`` would + raise an exception instead of percent-encoding those characters. + +* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses + work well with BufferedReaders and other ``io`` module features. (Pull #1652) + +* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673) + + +1.25.3 (2019-05-23) +------------------- + +* Change ``HTTPSConnection`` to load system CA certificates + when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are + unspecified. (Pull #1608, Issue #1603) + +* Upgrade bundled rfc3986 to v1.3.2. 
(Pull #1609, Issue #1605) + + +1.25.2 (2019-04-28) +------------------- + +* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583) + +* Change ``parse_url`` to percent-encode invalid characters within the + path, query, and target components. (Pull #1586) + + +1.25.1 (2019-04-24) +------------------- + +* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579) + +* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578) + + +1.25 (2019-04-22) +----------------- + +* Require and validate certificates by default when using HTTPS (Pull #1507) + +* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487) + +* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use + encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489) + +* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext`` + implementations. (Pull #1496) + +* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492) + +* Fixed issue where OpenSSL would block if an encrypted client private key was + given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489) + +* Added support for Brotli content encoding. It is enabled automatically if + ``brotlipy`` package is installed which can be requested with + ``urllib3[brotli]`` extra. (Pull #1532) + +* Drop ciphers using DSS key exchange from default TLS cipher suites. + Improve default ciphers when using SecureTransport. (Pull #1496) + +* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483) + +1.24.3 (2019-05-01) +------------------- + +* Apply fix for CVE-2019-9740. (Pull #1591) + +1.24.2 (2019-04-17) +------------------- + +* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or + ``ssl_context`` parameters are specified. + +* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510) + +* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269) + + +1.24.1 (2018-11-02) +------------------- + +* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467) + +* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462) + + +1.24 (2018-10-16) +----------------- + +* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449) + +* Test against Python 3.7 on AppVeyor. (Pull #1453) + +* Early-out ipv6 checks when running on App Engine. (Pull #1450) + +* Change ambiguous description of backoff_factor (Pull #1436) + +* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442) + +* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405). + +* Add a server_hostname parameter to HTTPSConnection which allows for + overriding the SNI hostname sent in the handshake. (Pull #1397) + +* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430) + +* Fixed bug where responses with header Content-Type: message/* erroneously + raised HeaderParsingError, resulting in a warning being logged. (Pull #1439) + +* Move urllib3 to src/urllib3 (Pull #1409) + + +1.23 (2018-06-04) +----------------- + +* Allow providing a list of headers to strip from requests when redirecting + to a different host. Defaults to the ``Authorization`` header. Different + headers can be set via ``Retry.remove_headers_on_redirect``. 
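Editor's note (illustration only): a minimal sketch of the ``Retry.remove_headers_on_redirect`` knob named in the 1.23 entry above; ``X-Api-Key`` is a made-up header used for illustration::

    import urllib3
    from urllib3.util.retry import Retry

    # Strip an extra header, besides the default Authorization, whenever a
    # redirect crosses to a different host:
    retry = Retry(remove_headers_on_redirect={"Authorization", "X-Api-Key"})
    http = urllib3.PoolManager(retries=retry)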
(Issue #1316) + +* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247). + +* Dropped Python 3.3 support. (Pull #1242) + +* Put the connection back in the pool when calling stream() or read_chunked() on + a chunked HEAD response. (Issue #1234) + +* Fixed pyOpenSSL-specific ssl client authentication issue when clients + attempted to auth via certificate + chain (Issue #1060) + +* Add the port to the connectionpool connect print (Pull #1251) + +* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380) + +* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088) + +* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359) + +* Added support for auth info in url for SOCKS proxy (Pull #1363) + + +1.22 (2017-07-20) +----------------- + +* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via + IPv6 proxy. (Issue #1222) + +* Made the connection pool retry on ``SSLError``. The original ``SSLError`` + is available on ``MaxRetryError.reason``. (Issue #1112) + +* Drain and release connection before recursing on retry/redirect. Fixes + deadlocks with a blocking connectionpool. (Issue #1167) + +* Fixed compatibility for cookiejar. (Issue #1229) + +* pyopenssl: Use vendored version of ``six``. (Issue #1231) + + +1.21.1 (2017-05-02) +------------------- + +* Fixed SecureTransport issue that would cause long delays in response body + delivery. (Pull #1154) + +* Fixed regression in 1.21 that threw exceptions when users passed the + ``socket_options`` flag to the ``PoolManager``. (Issue #1165) + +* Fixed regression in 1.21 that threw exceptions when users passed the + ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``. + (Pull #1157) + + +1.21 (2017-04-25) +----------------- + +* Improved performance of certain selector system calls on Python 3.5 and + later. (Pull #1095) + +* Resolved issue where the PyOpenSSL backend would not wrap SysCallError + exceptions appropriately when sending data. (Pull #1125) + +* Selectors now detects a monkey-patched select module after import for modules + that patch the select module like eventlet, greenlet. (Pull #1128) + +* Reduced memory consumption when streaming zlib-compressed responses + (as opposed to raw deflate streams). (Pull #1129) + +* Connection pools now use the entire request context when constructing the + pool key. (Pull #1016) + +* ``PoolManager.connection_from_*`` methods now accept a new keyword argument, + ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``. + (Pull #1016) + +* Add retry counter for ``status_forcelist``. (Issue #1147) + +* Added ``contrib`` module for using SecureTransport on macOS: + ``urllib3.contrib.securetransport``. (Pull #1122) + +* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes: + for schemes it does not recognise, it assumes they are case-sensitive and + leaves them unchanged. + (Issue #1080) + + +1.20 (2017-01-19) +----------------- + +* Added support for waiting for I/O using selectors other than select, + improving urllib3's behaviour with large numbers of concurrent connections. + (Pull #1001) + +* Updated the date for the system clock check. (Issue #1005) + +* ConnectionPools now correctly consider hostnames to be case-insensitive. + (Issue #1032) + +* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module + to fail when it is injected, rather than at first use. 
(Pull #1063) + +* Outdated versions of cryptography now cause the PyOpenSSL contrib module + to fail when it is injected, rather than at first use. (Issue #1044) + +* Automatically attempt to rewind a file-like body object when a request is + retried or redirected. (Pull #1039) + +* Fix some bugs that occur when modules incautiously patch the queue module. + (Pull #1061) + +* Prevent retries from occurring on read timeouts for which the request method + was not in the method whitelist. (Issue #1059) + +* Changed the PyOpenSSL contrib module to lazily load idna to avoid + unnecessarily bloating the memory of programs that don't need it. (Pull + #1076) + +* Add support for IPv6 literals with zone identifiers. (Pull #1013) + +* Added support for socks5h:// and socks4a:// schemes when working with SOCKS + proxies, and controlled remote DNS appropriately. (Issue #1035) + + +1.19.1 (2016-11-16) +------------------- + +* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025) + + +1.19 (2016-11-03) +----------------- + +* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when + using the default retry logic. (Pull #955) + +* Remove markers from setup.py to assist ancient setuptools versions. (Issue + #986) + +* Disallow superscripts and other integerish things in URL ports. (Issue #989) + +* Allow urllib3's HTTPResponse.stream() method to continue to work with + non-httplib underlying FPs. (Pull #990) + +* Empty filenames in multipart headers are now emitted as such, rather than + being suppressed. (Issue #1015) + +* Prefer user-supplied Host headers on chunked uploads. (Issue #1009) + + +1.18.1 (2016-10-27) +------------------- + +* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with + PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This + release fixes a vulnerability whereby urllib3 in the above configuration + would silently fail to validate TLS certificates due to erroneously setting + invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous + flags do not cause a problem in OpenSSL versions before 1.1.0, which + interprets the presence of any flag as requesting certificate validation. + + There is no PR for this patch, as it was prepared for simultaneous disclosure + and release. The master branch received the same fix in Pull #1010. + + +1.18 (2016-09-26) +----------------- + +* Fixed incorrect message for IncompleteRead exception. (Pull #973) + +* Accept ``iPAddress`` subject alternative name fields in TLS certificates. + (Issue #258) + +* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3. + (Issue #977) + +* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979) + + +1.17 (2016-09-06) +----------------- + +* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835) + +* ConnectionPool debug log now includes scheme, host, and port. (Issue #897) + +* Substantially refactored documentation. (Issue #887) + +* Used URLFetch default timeout on AppEngine, rather than hardcoding our own. + (Issue #858) + +* Normalize the scheme and host in the URL parser (Issue #833) + +* ``HTTPResponse`` contains the last ``Retry`` object, which now also + contains retries history. (Issue #848) + +* Timeout can no longer be set as boolean, and must be greater than zero. + (Pull #924) + +* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We + now use cryptography and idna, both of which are already dependencies of + PyOpenSSL. 
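Editor's note (illustration only): the 1.17 entry above starts accepting pre-built ``SSLContext`` objects. A minimal sketch::

    import ssl

    import urllib3

    # Hand a pre-configured context straight to the pool manager; urllib3
    # then uses it instead of building its own.
    ctx = ssl.create_default_context()
    http = urllib3.PoolManager(ssl_context=ctx)
    http.request("GET", "https://example.com")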
(Pull #930) + +* Fixed infinite loop in ``stream`` when amt=None. (Issue #928) + +* Try to use the operating system's certificates when we are using an + ``SSLContext``. (Pull #941) + +* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to + ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947) + +* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958) + +* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958) + +* Implemented ``length_remaining`` to determine remaining content + to be read. (Pull #949) + +* Implemented ``enforce_content_length`` to enable exceptions when + incomplete data chunks are received. (Pull #949) + +* Dropped connection start, dropped connection reset, redirect, forced retry, + and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967) + + +1.16 (2016-06-11) +----------------- + +* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840) + +* Provide ``key_fn_by_scheme`` pool keying mechanism that can be + overridden. (Issue #830) + +* Normalize scheme and host to lowercase for pool keys, and include + ``source_address``. (Issue #830) + +* Cleaner exception chain in Python 3 for ``_make_request``. + (Issue #861) + +* Fixed installing ``urllib3[socks]`` extra. (Issue #864) + +* Fixed signature of ``ConnectionPool.close`` so it can actually safely be + called by subclasses. (Issue #873) + +* Retain ``release_conn`` state across retries. (Issues #651, #866) + +* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to + ``HTTPResponse`` but can be replaced with a subclass. (Issue #879) + + +1.15.1 (2016-04-11) +------------------- + +* Fix packaging to include backports module. (Issue #841) + + +1.15 (2016-04-06) +----------------- + +* Added Retry(raise_on_status=False). (Issue #720) + +* Always use setuptools, no more distutils fallback. (Issue #785) + +* Dropped support for Python 3.2. (Issue #786) + +* Chunked transfer encoding when requesting with ``chunked=True``. + (Issue #790) + +* Fixed regression with IPv6 port parsing. (Issue #801) + +* Append SNIMissingWarning messages to allow users to specify it in + the PYTHONWARNINGS environment variable. (Issue #816) + +* Handle unicode headers in Py2. (Issue #818) + +* Log certificate when there is a hostname mismatch. (Issue #820) + +* Preserve order of request/response headers. (Issue #821) + + +1.14 (2015-12-29) +----------------- + +* contrib: SOCKS proxy support! (Issue #762) + +* Fixed AppEngine handling of transfer-encoding header and bug + in Timeout defaults checking. (Issue #763) + + +1.13.1 (2015-12-18) +------------------- + +* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761) + + +1.13 (2015-12-14) +----------------- + +* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706) + +* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717) + +* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696) + +* Close connections more defensively on exception. (Issue #734) + +* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without + repeatedly flushing the decoder, to function better on Jython. (Issue #743) + +* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758) + + +1.12 (2015-09-03) +----------------- + +* Rely on ``six`` for importing ``httplib`` to work around + conflicts with other Python 3 shims. (Issue #688) + +* Add support for directories of certificate authorities, as supported by + OpenSSL. 
(Issue #701) + +* New exception: ``NewConnectionError``, raised when we fail to establish + a new connection, usually ``ECONNREFUSED`` socket error. + + +1.11 (2015-07-21) +----------------- + +* When ``ca_certs`` is given, ``cert_reqs`` defaults to + ``'CERT_REQUIRED'``. (Issue #650) + +* ``pip install urllib3[secure]`` will install Certifi and + PyOpenSSL as dependencies. (Issue #678) + +* Made ``HTTPHeaderDict`` usable as a ``headers`` input value + (Issues #632, #679) + +* Added `urllib3.contrib.appengine `_ + which has an ``AppEngineManager`` for using ``URLFetch`` in a + Google AppEngine environment. (Issue #664) + +* Dev: Added test suite for AppEngine. (Issue #631) + +* Fix performance regression when using PyOpenSSL. (Issue #626) + +* Passing incorrect scheme (e.g. ``foo://``) will raise + ``ValueError`` instead of ``AssertionError`` (backwards + compatible for now, but please migrate). (Issue #640) + +* Fix pools not getting replenished when an error occurs during a + request using ``release_conn=False``. (Issue #644) + +* Fix pool-default headers not applying for url-encoded requests + like GET. (Issue #657) + +* log.warning in Python 3 when headers are skipped due to parsing + errors. (Issue #642) + +* Close and discard connections if an error occurs during read. + (Issue #660) + +* Fix host parsing for IPv6 proxies. (Issue #668) + +* Separate warning type SubjectAltNameWarning, now issued once + per host. (Issue #671) + +* Fix ``httplib.IncompleteRead`` not getting converted to + ``ProtocolError`` when using ``HTTPResponse.stream()`` + (Issue #674) + +1.10.4 (2015-05-03) +------------------- + +* Migrate tests to Tornado 4. (Issue #594) + +* Append default warning configuration rather than overwrite. + (Issue #603) + +* Fix streaming decoding regression. (Issue #595) + +* Fix chunked requests losing state across keep-alive connections. + (Issue #599) + +* Fix hanging when chunked HEAD response has no body. (Issue #605) + + +1.10.3 (2015-04-21) +------------------- + +* Emit ``InsecurePlatformWarning`` when SSLContext object is missing. + (Issue #558) + +* Fix regression of duplicate header keys being discarded. + (Issue #563) + +* ``Response.stream()`` returns a generator for chunked responses. + (Issue #560) + +* Set upper-bound timeout when waiting for a socket in PyOpenSSL. + (Issue #585) + +* Work on platforms without `ssl` module for plain HTTP requests. + (Issue #587) + +* Stop relying on the stdlib's default cipher list. (Issue #588) + + +1.10.2 (2015-02-25) +------------------- + +* Fix file descriptor leakage on retries. (Issue #548) + +* Removed RC4 from default cipher list. (Issue #551) + +* Header performance improvements. (Issue #544) + +* Fix PoolManager not obeying redirect retry settings. (Issue #553) + + +1.10.1 (2015-02-10) +------------------- + +* Pools can be used as context managers. (Issue #545) + +* Don't re-use connections which experienced an SSLError. (Issue #529) + +* Don't fail when gzip decoding an empty stream. (Issue #535) + +* Add sha256 support for fingerprint verification. (Issue #540) + +* Fixed handling of header values containing commas. (Issue #533) + + +1.10 (2014-12-14) +----------------- + +* Disabled SSLv3. (Issue #473) + +* Add ``Url.url`` property to return the composed url string. (Issue #394) + +* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412) + +* ``MaxRetryError.reason`` will always be an exception, not string. + (Issue #481) + +* Fixed SSL-related timeouts not being detected as timeouts. 
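Editor's note (illustration only): the 1.10.1 entry above makes pools usable as context managers. A minimal sketch::

    import urllib3

    # close() runs automatically on exit, draining idle connections:
    with urllib3.HTTPConnectionPool("example.com", maxsize=2) as pool:
        r = pool.request("GET", "/")
        print(r.status)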
(Issue #492) + +* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473) + +* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request. + (Issue #496) + +* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``. + (Issue #499) + +* Close and discard sockets which experienced SSL-related errors. + (Issue #501) + +* Handle ``body`` param in ``.request(...)``. (Issue #513) + +* Respect timeout with HTTPS proxy. (Issue #505) + +* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520) + + +1.9.1 (2014-09-13) +------------------ + +* Apply socket arguments before binding. (Issue #427) + +* More careful checks if fp-like object is closed. (Issue #435) + +* Fixed packaging issues of some development-related files not + getting included. (Issue #440) + +* Allow performing *only* fingerprint verification. (Issue #444) + +* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445) + +* Fixed PyOpenSSL compatibility with PyPy. (Issue #450) + +* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3. + (Issue #443) + + + +1.9 (2014-07-04) +---------------- + +* Shuffled around development-related files. If you're maintaining a distro + package of urllib3, you may need to tweak things. (Issue #415) + +* Unverified HTTPS requests will trigger a warning on the first request. See + our new `security documentation + `_ for details. + (Issue #426) + +* New retry logic and ``urllib3.util.retry.Retry`` configuration object. + (Issue #326) + +* All raised exceptions should now wrapped in a + ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326) + +* All errors during a retry-enabled request should be wrapped in + ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions + which were previously exempt. Underlying error is accessible from the + ``.reason`` property. (Issue #326) + +* ``urllib3.exceptions.ConnectionError`` renamed to + ``urllib3.exceptions.ProtocolError``. (Issue #326) + +* Errors during response read (such as IncompleteRead) are now wrapped in + ``urllib3.exceptions.ProtocolError``. (Issue #418) + +* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``. + (Issue #417) + +* Catch read timeouts over SSL connections as + ``urllib3.exceptions.ReadTimeoutError``. (Issue #419) + +* Apply socket arguments before connecting. (Issue #427) + + +1.8.3 (2014-06-23) +------------------ + +* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385) + +* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393) + +* Wrap ``socket.timeout`` exception with + ``urllib3.exceptions.ReadTimeoutError``. (Issue #399) + +* Fixed proxy-related bug where connections were being reused incorrectly. + (Issues #366, #369) + +* Added ``socket_options`` keyword parameter which allows to define + ``setsockopt`` configuration of new sockets. (Issue #397) + +* Removed ``HTTPConnection.tcp_nodelay`` in favor of + ``HTTPConnection.default_socket_options``. (Issue #397) + +* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411) + + +1.8.2 (2014-04-17) +------------------ + +* Fix ``urllib3.util`` not being included in the package. + + +1.8.1 (2014-04-17) +------------------ + +* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356) + +* Don't install ``dummyserver`` into ``site-packages`` as it's only needed + for the test suite. (Issue #362) + +* Added support for specifying ``source_address``. 
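Editor's note (illustration only): the 1.8.3 entry above adds ``disable_cache`` to ``urllib3.util.make_headers``. A minimal sketch; the user-agent string is made up::

    from urllib3.util import make_headers

    headers = make_headers(
        keep_alive=True,
        user_agent="example-agent/1.0",  # hypothetical UA string
        disable_cache=True,
    )
    # -> {'connection': 'keep-alive',
    #     'user-agent': 'example-agent/1.0',
    #     'cache-control': 'no-cache'}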
(Issue #352) + + +1.8 (2014-03-04) +---------------- + +* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in + username, and blank ports like 'hostname:'). + +* New ``urllib3.connection`` module which contains all the HTTPConnection + objects. + +* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor + signature to a more sensible order. [Backwards incompatible] + (Issues #252, #262, #263) + +* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274) + +* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which + returns the number of bytes read so far. (Issue #277) + +* Support for platforms without threading. (Issue #289) + +* Expand default-port comparison in ``HTTPConnectionPool.is_same_host`` + to allow a pool with no specified port to be considered equal to to an + HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305) + +* Improved default SSL/TLS settings to avoid vulnerabilities. + (Issue #309) + +* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors. + (Issue #310) + +* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests + will send the entire HTTP request ~200 milliseconds faster; however, some of + the resulting TCP packets will be smaller. (Issue #254) + +* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl`` + from the default 64 to 1024 in a single certificate. (Issue #318) + +* Headers are now passed and stored as a custom + ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``. + (Issue #329, #333) + +* Headers no longer lose their case on Python 3. (Issue #236) + +* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA + certificates on inject. (Issue #332) + +* Requests with ``retries=False`` will immediately raise any exceptions without + wrapping them in ``MaxRetryError``. (Issue #348) + +* Fixed open socket leak with SSL-related failures. (Issue #344, #348) + + +1.7.1 (2013-09-25) +------------------ + +* Added granular timeout support with new ``urllib3.util.Timeout`` class. + (Issue #231) + +* Fixed Python 3.4 support. (Issue #238) + + +1.7 (2013-08-14) +---------------- + +* More exceptions are now pickle-able, with tests. (Issue #174) + +* Fixed redirecting with relative URLs in Location header. (Issue #178) + +* Support for relative urls in ``Location: ...`` header. (Issue #179) + +* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus + file-like functionality. (Issue #187) + +* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will + skip hostname verification for SSL connections. (Issue #194) + +* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a + generator wrapped around ``.read(...)``. (Issue #198) + +* IPv6 url parsing enforces brackets around the hostname. (Issue #199) + +* Fixed thread race condition in + ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204) + +* ``ProxyManager`` requests now include non-default port in ``Host: ...`` + header. (Issue #217) + +* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139) + +* New ``RequestField`` object can be passed to the ``fields=...`` param which + can specify headers. (Issue #220) + +* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails. + (Issue #221) + +* Use international headers when posting file names. (Issue #119) + +* Improved IPv6 support. 
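Editor's note (illustration only): the 1.7.1 entry above introduces the granular ``urllib3.util.Timeout`` class. A minimal sketch::

    import urllib3
    from urllib3.util.timeout import Timeout

    # Separate connect and read budgets instead of one blanket number:
    http = urllib3.PoolManager(timeout=Timeout(connect=2.0, read=7.0))
    http.request("GET", "http://example.com")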
(Issue #203) + + +1.6 (2013-04-25) +---------------- + +* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156) + +* ``ProxyManager`` automatically adds ``Host: ...`` header if not given. + +* Improved SSL-related code. ``cert_req`` now optionally takes a string like + "REQUIRED" or "NONE". Same with ``ssl_version`` takes strings like "SSLv23" + The string values reflect the suffix of the respective constant variable. + (Issue #130) + +* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly + closed proxy connections and larger read buffers. (Issue #135) + +* Ensure the connection is closed if no data is received, fixes connection leak + on some platforms. (Issue #133) + +* Added SNI support for SSL/TLS connections on Py32+. (Issue #89) + +* Tests fixed to be compatible with Py26 again. (Issue #125) + +* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant + to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109) + +* Allow an explicit content type to be specified when encoding file fields. + (Issue #126) + +* Exceptions are now pickleable, with tests. (Issue #101) + +* Fixed default headers not getting passed in some cases. (Issue #99) + +* Treat "content-encoding" header value as case-insensitive, per RFC 2616 + Section 3.5. (Issue #110) + +* "Connection Refused" SocketErrors will get retried rather than raised. + (Issue #92) + +* Updated vendored ``six``, no longer overrides the global ``six`` module + namespace. (Issue #113) + +* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding + the exception that prompted the final retry. If ``reason is None`` then it + was due to a redirect. (Issue #92, #114) + +* Fixed ``PoolManager.urlopen()`` from not redirecting more than once. + (Issue #149) + +* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters + that are not files. (Issue #111) + +* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122) + +* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or + against an arbitrary hostname (when connecting by IP or for misconfigured + servers). (Issue #140) + +* Streaming decompression support. (Issue #159) + + +1.5 (2012-08-02) +---------------- + +* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug + logging in urllib3. + +* Native full URL parsing (including auth, path, query, fragment) available in + ``urllib3.util.parse_url(url)``. + +* Built-in redirect will switch method to 'GET' if status code is 303. + (Issue #11) + +* ``urllib3.PoolManager`` strips the scheme and host before sending the request + uri. (Issue #8) + +* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding, + based on the Content-Type header, fails. + +* Fixed bug with pool depletion and leaking connections (Issue #76). Added + explicit connection closing on pool eviction. Added + ``urllib3.PoolManager.clear()``. + +* 99% -> 100% unit test coverage. + + +1.4 (2012-06-16) +---------------- + +* Minor AppEngine-related fixes. + +* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``. + +* Improved url parsing. (Issue #73) + +* IPv6 url support. (Issue #72) + + +1.3 (2012-03-25) +---------------- + +* Removed pre-1.0 deprecated API. + +* Refactored helpers into a ``urllib3.util`` submodule. + +* Fixed multipart encoding to support list-of-tuples for keys with multiple + values. 
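Editor's note (illustration only): the 1.5 entry above adds ``urllib3.add_stderr_logger()``. A minimal sketch::

    import urllib3

    # One call flips on DEBUG logging to stderr for quick diagnosis:
    urllib3.add_stderr_logger()

    http = urllib3.PoolManager()
    http.request("GET", "http://example.com")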
(Issue #48) + +* Fixed multiple Set-Cookie headers in response not getting merged properly in + Python 3. (Issue #53) + +* AppEngine support with Py27. (Issue #61) + +* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs + bytes. + + +1.2.2 (2012-02-06) +------------------ + +* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47) + + +1.2.1 (2012-02-05) +------------------ + +* Fixed another bug related to when ``ssl`` module is not available. (Issue #41) + +* Location parsing errors now raise ``urllib3.exceptions.LocationParseError`` + which inherits from ``ValueError``. + + +1.2 (2012-01-29) +---------------- + +* Added Python 3 support (tested on 3.2.2) + +* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2) + +* Use ``select.poll`` instead of ``select.select`` for platforms that support + it. + +* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive + connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``. + +* Fixed ``ImportError`` during install when ``ssl`` module is not available. + (Issue #41) + +* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not + completing properly. (Issue #28, uncovered by Issue #10 in v1.1) + +* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` + + ``eventlet``. Removed extraneous unsupported dummyserver testing backends. + Added socket-level tests. + +* More tests. Achievement Unlocked: 99% Coverage. + + +1.1 (2012-01-07) +---------------- + +* Refactored ``dummyserver`` to its own root namespace module (used for + testing). + +* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in + Py32's ``ssl_match_hostname``. (Issue #25) + +* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10) + +* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue + #27) + +* Fixed timeout-related bugs. (Issues #17, #23) + + +1.0.2 (2011-11-04) +------------------ + +* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if + you're using the object manually. (Thanks pyos) + +* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by + wrapping the access log in a mutex. (Thanks @christer) + +* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and + ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code. + + +1.0.1 (2011-10-10) +------------------ + +* Fixed a bug where the same connection would get returned into the pool twice, + causing extraneous "HttpConnectionPool is full" log warnings. + + +1.0 (2011-10-08) +---------------- + +* Added ``PoolManager`` with LRU expiration of connections (tested and + documented). +* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works + with HTTPS proxies). +* Added optional partial-read support for responses when + ``preload_content=False``. You can now make requests and just read the headers + without loading the content. +* Made response decoding optional (default on, same as before). +* Added optional explicit boundary string for ``encode_multipart_formdata``. +* Convenience request methods are now inherited from ``RequestMethods``. Old + helpers like ``get_url`` and ``post_url`` should be abandoned in favour of + the new ``request(method, url, ...)``. +* Refactored code to be even more decoupled, reusable, and extendable. +* License header added to ``.py`` files. 
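Editor's note (illustration only): the 1.0 entry above retires ``get_url``/``post_url`` in favour of the unified ``request()``. A minimal sketch::

    import urllib3

    http = urllib3.PoolManager()
    # fields= becomes the query string for GET and a form body for POST:
    r = http.request("GET", "http://example.com", fields={"q": "hello"})
    print(r.status, len(r.data))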
+* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code + and docs in ``docs/`` and on https://urllib3.readthedocs.io/. +* Embettered all the things! +* Started writing this file. + + +0.4.1 (2011-07-17) +------------------ + +* Minor bug fixes, code cleanup. + + +0.4 (2011-03-01) +---------------- + +* Better unicode support. +* Added ``VerifiedHTTPSConnection``. +* Added ``NTLMConnectionPool`` in contrib. +* Minor improvements. + + +0.3.1 (2010-07-13) +------------------ + +* Added ``assert_host_name`` optional parameter. Now compatible with proxies. + + +0.3 (2009-12-10) +---------------- + +* Added HTTPS support. +* Minor bug fixes. +* Refactored, broken backwards compatibility with 0.2. +* API to be treated as stable from this version forward. + + +0.2 (2008-11-17) +---------------- + +* Added unit tests. +* Bug fixes. + + +0.1 (2008-11-16) +---------------- + +* First release. diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/RECORD b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/RECORD new file mode 100644 index 0000000..a5a1613 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/RECORD @@ -0,0 +1,46 @@ +urllib3-1.26.18.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +urllib3-1.26.18.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115 +urllib3-1.26.18.dist-info/METADATA,sha256=0-mrjgLHzqam8qc4NJeQ1kZaJYWIalBUadimzyzZdOU,48910 +urllib3-1.26.18.dist-info/RECORD,, +urllib3-1.26.18.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3-1.26.18.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110 +urllib3-1.26.18.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8 +urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333 +urllib3/_collections.py,sha256=pyASJJhW7wdOpqJj9QJA8FyGRfr8E8uUUhqUvhF0728,11372 +urllib3/_version.py,sha256=cuJvnSrWxXGYgQ3-ZRoPMw8-qaN5tpw71jnH1t16dLA,64 +urllib3/connection.py,sha256=92k9td_y4PEiTIjNufCUa1NzMB3J3w0LEdyokYgXnW8,20300 +urllib3/connectionpool.py,sha256=Be6q65SR9laoikg-h_jmc_p8OWtEmwgq_Om_Xtig-2M,40285 +urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632 +urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922 +urllib3/contrib/appengine.py,sha256=6IBW6lPOoVUxASPwtn6IH1AATe5DK3lLJCfwyWlLKAE,11012 +urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528 +urllib3/contrib/pyopenssl.py,sha256=4AJAlo9NmjWofY4dJwRa4kbZuRuHfNJxu8Pv6yQk1ss,17055 +urllib3/contrib/securetransport.py,sha256=0YMMfoHyEc0TBwCkIgtCfygWGm3o4MXztlxn6zbav_U,34431 +urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097 +urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 +urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 +urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 +urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 +urllib3/packages/backports/weakref_finalize.py,sha256=tRCal5OAhNSRyb0DhHp-38AtIlCsRP8BxF3NX-6rqIA,5343 +urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665 +urllib3/poolmanager.py,sha256=aWyhXRtNO4JUnCSVVqKTKQd8EXTvUm1VN9pgs2bcONo,19990 +urllib3/request.py,sha256=YTWFNr7QIwh7E1W9dde9LM77v2VWTJ5V78XuTTw7D1A,6691 +urllib3/response.py,sha256=UPgLmnHj4z71ZnH8ivYOyncATifTOw9FQukUqDnckCc,30761 +urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 +urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901 +urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605 +urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 +urllib3/util/request.py,sha256=fWiAaa8pwdLLIqoTLBxCC2e4ed80muzKU3e3HWWTzFQ,4225 +urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 +urllib3/util/retry.py,sha256=Z6WEf518eTOXP5jr5QSQ9gqJI0DVYt3Xs3EKnYaTmus,22013 +urllib3/util/ssl_.py,sha256=c0sYiSC6272r6uPkxQpo5rYPP9QC1eR6oI7004gYqZo,17165 +urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758 +urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895 +urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168 +urllib3/util/url.py,sha256=kMxL1k0d-aQm_iZDw_zMmnyYyjrIA_DbsMy3cm3V55M,14279 +urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/REQUESTED b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/WHEEL b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/WHEEL new file mode 100644 index 0000000..c34f116 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/top_level.txt b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/top_level.txt new file mode 100644 index 0000000..a42590b --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3-1.26.18.dist-info/top_level.txt @@ -0,0 +1 @@ +urllib3 diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/_collections.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/_collections.py index da9857e..bceb845 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/_collections.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/_collections.py @@ -268,6 +268,24 @@ def getlist(self, key, default=__marker): else: return vals[1:] + def _prepare_for_method_change(self): + """ + Remove content-specific header fields before changing the request + method to GET or HEAD according to RFC 9110, Section 15.4. 
+ """ + content_specific_headers = [ + "Content-Encoding", + "Content-Language", + "Content-Location", + "Content-Type", + "Content-Length", + "Digest", + "Last-Modified", + ] + for header in content_specific_headers: + self.discard(header) + return self + # Backwards compatibility for httplib getheaders = getlist getallmatchingheaders = getlist diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/_version.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/_version.py index e12dd0e..85e725e 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/_version.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.15" +__version__ = "1.26.18" diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/connectionpool.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/connectionpool.py index c23d736..5a6adcb 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/connectionpool.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/connectionpool.py @@ -9,6 +9,7 @@ from socket import error as SocketError from socket import timeout as SocketTimeout +from ._collections import HTTPHeaderDict from .connection import ( BaseSSLError, BrokenPipeError, @@ -50,6 +51,13 @@ from .util.url import _normalize_host as normalize_host from .util.url import get_host, parse_url +try: # Platform-specific: Python 3 + import weakref + + weakref_finalize = weakref.finalize +except AttributeError: # Platform-specific: Python 2 + from .packages.backports.weakref_finalize import weakref_finalize + xrange = six.moves.xrange log = logging.getLogger(__name__) @@ -220,6 +228,16 @@ def __init__( self.conn_kw["proxy"] = self.proxy self.conn_kw["proxy_config"] = self.proxy_config + # Do not pass 'self' as callback to 'finalize'. + # Then the 'finalize' would keep an endless living (leak) to self. + # By just passing a reference to the pool allows the garbage collector + # to free self if nobody else has a reference to it. + pool = self.pool + + # Close all the HTTPConnections in the pool before the + # HTTPConnectionPool object is garbage collected. + weakref_finalize(self, _close_pool_connections, pool) + def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. @@ -489,14 +507,8 @@ def close(self): # Disable access to the pool old_pool, self.pool = self.pool, None - try: - while True: - conn = old_pool.get(block=False) - if conn: - conn.close() - - except queue.Empty: - pass # Done. + # Close all the HTTPConnections in the pool. + _close_pool_connections(old_pool) def is_same_host(self, url): """ @@ -832,7 +844,11 @@ def _is_ssl_error_message_from_http_proxy(ssl_error): redirect_location = redirect and response.get_redirect_location() if redirect_location: if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. method = "GET" + # And lose the body not to transfer anything sensitive. 
+ body = None + headers = HTTPHeaderDict(headers)._prepare_for_method_change() try: retries = retries.increment(method, url, response=response, _pool=self) @@ -1108,3 +1124,14 @@ def _normalize_host(host, scheme): if host.startswith("[") and host.endswith("]"): host = host[1:-1] return host + + +def _close_pool_connections(pool): + """Drains a queue of connections and closes each one.""" + try: + while True: + conn = pool.get(block=False) + if conn: + conn.close() + except queue.Empty: + pass # Done. diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/contrib/securetransport.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/contrib/securetransport.py index 6c46a3b..e311c0c 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/contrib/securetransport.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/contrib/securetransport.py @@ -64,9 +64,8 @@ import threading import weakref -import six - from .. import util +from ..packages import six from ..util.ssl_ import PROTOCOL_TLS_CLIENT from ._securetransport.bindings import CoreFoundation, Security, SecurityConst from ._securetransport.low_level import ( diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/packages/backports/weakref_finalize.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/packages/backports/weakref_finalize.py new file mode 100644 index 0000000..a2f2966 --- /dev/null +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/packages/backports/weakref_finalize.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +""" +backports.weakref_finalize +~~~~~~~~~~~~~~~~~~ + +Backports the Python 3 ``weakref.finalize`` method. +""" +from __future__ import absolute_import + +import itertools +import sys +from weakref import ref + +__all__ = ["weakref_finalize"] + + +class weakref_finalize(object): + """Class for finalization of weakrefable objects + finalize(obj, func, *args, **kwargs) returns a callable finalizer + object which will be called when obj is garbage collected. The + first time the finalizer is called it evaluates func(*arg, **kwargs) + and returns the result. After this the finalizer is dead, and + calling it just returns None. + When the program exits any remaining finalizers for which the + atexit attribute is true will be run in reverse order of creation. + By default atexit is true. + """ + + # Finalizer objects don't have any state of their own. They are + # just used as keys to lookup _Info objects in the registry. This + # ensures that they cannot be part of a ref-cycle. 
+ + __slots__ = () + _registry = {} + _shutdown = False + _index_iter = itertools.count() + _dirty = False + _registered_with_atexit = False + + class _Info(object): + __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index") + + def __init__(self, obj, func, *args, **kwargs): + if not self._registered_with_atexit: + # We may register the exit function more than once because + # of a thread race, but that is harmless + import atexit + + atexit.register(self._exitfunc) + weakref_finalize._registered_with_atexit = True + info = self._Info() + info.weakref = ref(obj, self) + info.func = func + info.args = args + info.kwargs = kwargs or None + info.atexit = True + info.index = next(self._index_iter) + self._registry[self] = info + weakref_finalize._dirty = True + + def __call__(self, _=None): + """If alive then mark as dead and return func(*args, **kwargs); + otherwise return None""" + info = self._registry.pop(self, None) + if info and not self._shutdown: + return info.func(*info.args, **(info.kwargs or {})) + + def detach(self): + """If alive then mark as dead and return (obj, func, args, kwargs); + otherwise return None""" + info = self._registry.get(self) + obj = info and info.weakref() + if obj is not None and self._registry.pop(self, None): + return (obj, info.func, info.args, info.kwargs or {}) + + def peek(self): + """If alive then return (obj, func, args, kwargs); + otherwise return None""" + info = self._registry.get(self) + obj = info and info.weakref() + if obj is not None: + return (obj, info.func, info.args, info.kwargs or {}) + + @property + def alive(self): + """Whether finalizer is alive""" + return self in self._registry + + @property + def atexit(self): + """Whether finalizer should be called at exit""" + info = self._registry.get(self) + return bool(info) and info.atexit + + @atexit.setter + def atexit(self, value): + info = self._registry.get(self) + if info: + info.atexit = bool(value) + + def __repr__(self): + info = self._registry.get(self) + obj = info and info.weakref() + if obj is None: + return "<%s object at %#x; dead>" % (type(self).__name__, id(self)) + else: + return "<%s object at %#x; for %r at %#x>" % ( + type(self).__name__, + id(self), + type(obj).__name__, + id(obj), + ) + + @classmethod + def _select_for_exit(cls): + # Return live finalizers marked for exit, oldest first + L = [(f, i) for (f, i) in cls._registry.items() if i.atexit] + L.sort(key=lambda item: item[1].index) + return [f for (f, i) in L] + + @classmethod + def _exitfunc(cls): + # At shutdown invoke finalizers for which atexit is true. + # This is called once all other non-daemonic threads have been + # joined. 
+ reenable_gc = False + try: + if cls._registry: + import gc + + if gc.isenabled(): + reenable_gc = True + gc.disable() + pending = None + while True: + if pending is None or weakref_finalize._dirty: + pending = cls._select_for_exit() + weakref_finalize._dirty = False + if not pending: + break + f = pending.pop() + try: + # gc is disabled, so (assuming no daemonic + # threads) the following is the only line in + # this function which might trigger creation + # of a new finalizer + f() + except Exception: + sys.excepthook(*sys.exc_info()) + assert f not in cls._registry + finally: + # prevent any more finalizers from executing during shutdown + weakref_finalize._shutdown = True + if reenable_gc: + gc.enable() diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/poolmanager.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/poolmanager.py index ca4ec34..fb51bf7 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/poolmanager.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/poolmanager.py @@ -4,7 +4,7 @@ import functools import logging -from ._collections import RecentlyUsedContainer +from ._collections import HTTPHeaderDict, RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme from .exceptions import ( LocationValueError, @@ -171,7 +171,7 @@ class PoolManager(RequestMethods): def __init__(self, num_pools=10, headers=None, **connection_pool_kw): RequestMethods.__init__(self, headers) self.connection_pool_kw = connection_pool_kw - self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) + self.pools = RecentlyUsedContainer(num_pools) # Locally set the pool classes and keys so other PoolManagers can # override them. @@ -382,9 +382,12 @@ def urlopen(self, method, url, redirect=True, **kw): # Support relative URLs for redirecting. redirect_location = urljoin(url, redirect_location) - # RFC 7231, Section 6.4.4 if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. method = "GET" + # And lose the body not to transfer anything sensitive. 
+ kw["body"] = None + kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change() retries = kw.get("retries") if not isinstance(retries, Retry): diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/request.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/request.py index 398386a..3b4cf99 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/request.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/request.py @@ -1,6 +1,9 @@ from __future__ import absolute_import +import sys + from .filepost import encode_multipart_formdata +from .packages import six from .packages.six.moves.urllib.parse import urlencode __all__ = ["RequestMethods"] @@ -168,3 +171,21 @@ def request_encode_body( extra_kw.update(urlopen_kw) return self.urlopen(method, url, **extra_kw) + + +if not six.PY2: + + class RequestModule(sys.modules[__name__].__class__): + def __call__(self, *args, **kwargs): + """ + If user tries to call this module directly urllib3 v2.x style raise an error to the user + suggesting they may need urllib3 v2 + """ + raise TypeError( + "'module' object is not callable\n" + "urllib3.request() method is not supported in this release, " + "upgrade to urllib3 v2 to use it\n" + "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html" + ) + + sys.modules[__name__].__class__ = RequestModule diff --git a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/util/retry.py b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/util/retry.py index 2490d5e..60ef6c4 100644 --- a/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/util/retry.py +++ b/sigsci_TA_for_splunk/bin/sigsci_ta_for_splunk/aob_py3/urllib3/util/retry.py @@ -235,7 +235,7 @@ class Retry(object): RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) #: Default headers to be used for ``remove_headers_on_redirect`` - DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) #: Maximum backoff time. DEFAULT_BACKOFF_MAX = 120 diff --git a/sigsci_TA_for_splunk/default/addon_builder.conf b/sigsci_TA_for_splunk/default/addon_builder.conf index fb73604..c6ffe28 100644 --- a/sigsci_TA_for_splunk/default/addon_builder.conf +++ b/sigsci_TA_for_splunk/default/addon_builder.conf @@ -1,7 +1,7 @@ # this file is generated by add-on builder automatically # please do not edit it [base] -builder_version = 4.1.3 +builder_version = 4.1.4 builder_build = 0 is_edited = 1 diff --git a/sigsci_TA_for_splunk/default/app.conf b/sigsci_TA_for_splunk/default/app.conf index 2634a99..00d524a 100644 --- a/sigsci_TA_for_splunk/default/app.conf +++ b/sigsci_TA_for_splunk/default/app.conf @@ -3,11 +3,11 @@ state_change_requires_restart = false is_configured = 0 state = enabled -build = 15 +build = 18 [launcher] author = Fastly -version = 1.0.37 +version = 1.0.38 description = For users of Fastly who would like to enrich their Splunk data with information from Fastly. This app with simple configuration enabled the importing of Events, Activity, and raw request information to Splunk.This is an open source project, no support provided, public repository is available and installation documentation can be found at https://github.com/fastly/sigsci-splunk-app. The best way to report issues with the app is to create an Issue on the github page so that it can be tracked. 
[ui] diff --git a/sigsci_TA_for_splunk/default/collections.conf b/sigsci_TA_for_splunk/default/collections.conf index a45651e..4ca5b4c 100644 --- a/sigsci_TA_for_splunk/default/collections.conf +++ b/sigsci_TA_for_splunk/default/collections.conf @@ -1,2 +1,3 @@ [sigsci_TA_for_splunk_checkpointer] field.state = string + diff --git a/sigsci_TA_for_splunk/default/inputs.conf b/sigsci_TA_for_splunk/default/inputs.conf index f52f55f..40e6c97 100644 --- a/sigsci_TA_for_splunk/default/inputs.conf +++ b/sigsci_TA_for_splunk/default/inputs.conf @@ -3,30 +3,61 @@ start_by_shell = false python.version = python3 sourcetype = sigsci-event interval = 300 +disable_catchup = True +twenty_hour_catchup = False +request_timeout = 60 +read_timeout = 60 disabled = 0 -[SigsciRequests] +[SigsciActivity] start_by_shell = false python.version = python3 -sourcetype = sigsci-requests +sourcetype = sigsci-activity interval = 300 +disable_catchup = True +twenty_hour_catchup = False +request_timeout = 60 +read_timeout = 60 disabled = 0 -[SigsciActivity] +[SigsciRequests] start_by_shell = false python.version = python3 -sourcetype = sigsci-activity +sourcetype = sigsci-requests interval = 300 +request_limit = 1000 +disable_catchup = True +twenty_hour_catchup = False +attack_and_anomaly_signals_only = False +request_timeout = 60 +read_timeout = 60 disabled = 0 -[SigsciRequests://Demo_SigSciRequests] -interval = 300 -site_api_name = YourSite - [SigsciEvent://Demo_SigsciEvents] interval = 60 site_api_name = YourSite +disabled = 1 +disable_catchup = 0 +read_timeout = 60 +request_timeout = 60 +twenty_hour_catchup = False [SigsciActivity://Demo_SigSciActivity] interval = 60 +disabled = 1 +disable_catchup = 0 +read_timeout = 60 +request_timeout = 60 +twenty_hour_catchup = False + +[SigsciRequests://Demo_SigsciRequests] +disable_catchup = 1 +interval = 300 +read_timeout = 300 +request_limit = 1000 +request_timeout = 60 +site_api_name = YourSite +attack_and_anomaly_signals_only = False +twenty_hour_catchup = 0 +disabled = 1 diff --git a/sigsci_TA_for_splunk/default/restmap.conf b/sigsci_TA_for_splunk/default/restmap.conf index 86b94ee..ef46041 100644 --- a/sigsci_TA_for_splunk/default/restmap.conf +++ b/sigsci_TA_for_splunk/default/restmap.conf @@ -1,7 +1,7 @@ [admin:sigsci_TA_for_splunk] match = / -members = sigsci_TA_for_splunk_settings, sigsci_TA_for_splunk_SigsciEvent, sigsci_TA_for_splunk_SigsciRequests, sigsci_TA_for_splunk_SigsciActivity +members = sigsci_TA_for_splunk_settings, sigsci_TA_for_splunk_SigsciEvent, sigsci_TA_for_splunk_SigsciActivity, sigsci_TA_for_splunk_SigsciRequests [admin_external:sigsci_TA_for_splunk_settings] handlertype = python @@ -17,16 +17,16 @@ handlerfile = sigsci_TA_for_splunk_rh_SigsciEvent.py handleractions = edit, list, remove, create handlerpersistentmode = true -[admin_external:sigsci_TA_for_splunk_SigsciRequests] +[admin_external:sigsci_TA_for_splunk_SigsciActivity] handlertype = python python.version = python3 -handlerfile = sigsci_TA_for_splunk_rh_SigsciRequests.py +handlerfile = sigsci_TA_for_splunk_rh_SigsciActivity.py handleractions = edit, list, remove, create handlerpersistentmode = true -[admin_external:sigsci_TA_for_splunk_SigsciActivity] +[admin_external:sigsci_TA_for_splunk_SigsciRequests] handlertype = python python.version = python3 -handlerfile = sigsci_TA_for_splunk_rh_SigsciActivity.py +handlerfile = sigsci_TA_for_splunk_rh_SigsciRequests.py handleractions = edit, list, remove, create handlerpersistentmode = true diff --git a/sigsci_TA_for_splunk/default/transforms.conf 
b/sigsci_TA_for_splunk/default/transforms.conf new file mode 100644 index 0000000..26d8b90 --- /dev/null +++ b/sigsci_TA_for_splunk/default/transforms.conf @@ -0,0 +1,9 @@ +[siteRequestsLookup] +collection = sigsci_TA_for_splunk_checkpointer +external_type = kvstore +fields_list = _key,state + +[checkpointer_lookups] +collection = sigsci_TA_for_splunk_checkpointer +external_type = kvstore +fields_list = _key,state diff --git a/sigsci_TA_for_splunk/default/web.conf b/sigsci_TA_for_splunk/default/web.conf index 295b44c..98f630a 100644 --- a/sigsci_TA_for_splunk/default/web.conf +++ b/sigsci_TA_for_splunk/default/web.conf @@ -15,14 +15,6 @@ methods = POST, GET pattern = sigsci_TA_for_splunk_SigsciEvent/* methods = POST, GET, DELETE -[expose:sigsci_TA_for_splunk_SigsciRequests] -pattern = sigsci_TA_for_splunk_SigsciRequests -methods = POST, GET - -[expose:sigsci_TA_for_splunk_SigsciRequests_specified] -pattern = sigsci_TA_for_splunk_SigsciRequests/* -methods = POST, GET, DELETE - [expose:sigsci_TA_for_splunk_SigsciActivity] pattern = sigsci_TA_for_splunk_SigsciActivity methods = POST, GET @@ -31,6 +23,14 @@ methods = POST, GET pattern = sigsci_TA_for_splunk_SigsciActivity/* methods = POST, GET, DELETE +[expose:sigsci_TA_for_splunk_SigsciRequests] +pattern = sigsci_TA_for_splunk_SigsciRequests +methods = POST, GET + +[expose:sigsci_TA_for_splunk_SigsciRequests_specified] +pattern = sigsci_TA_for_splunk_SigsciRequests/* +methods = POST, GET, DELETE + [expose:_splunkd_data] pattern = data/* methods = GET diff --git a/sigsci_TA_for_splunk/sigsci_TA_for_splunk.aob_meta b/sigsci_TA_for_splunk/sigsci_TA_for_splunk.aob_meta index 75f3f4c..a650353 100644 --- a/sigsci_TA_for_splunk/sigsci_TA_for_splunk.aob_meta +++ b/sigsci_TA_for_splunk/sigsci_TA_for_splunk.aob_meta @@ -1 +1 @@ -{"basic_builder": {"appname": "sigsci_TA_for_splunk", "friendly_name": "Signal Sciences WAF TA", "version": "1.0.37", "author": "Fastly", "description": "For users of Fastly who would like to enrich their Splunk data with information from Fastly. This app with simple configuration enabled the importing of Events, Activity, and raw request information to Splunk.\n\nThis is an open source project, no support provided, public repository is available and installation documentation can be found at https://github.com/fastly/sigsci-splunk-app. 
The best way to report issues with the app is to create an Issue on the github page so that it can be tracked.", "theme": "#ea3f23", "large_icon": "iVBORw0KGgoAAAANSUhEUgAAAEgAAABICAYAAABV7bNHAAAAAXNSR0IArs4c6QAAD+5JREFUeF7tXAuMVNUZ/s69d96wCwULGoWGiFofNT6KsVEEhYqiEJAKiq0ixSpLa1EKKqAGBXV5RAlugkYXUFCK5dHEtKXl0QqrrNhoVojBRxsUUEGW3Z2dnZn7ar6zc8bLPnBn791BTU+y2dmz5/7n/P+c/z//4ztXuK7rIteSySRM04QQAvF4HOFwWP6ntrZWDZH9kUhE/m1ZFhoaGuRn9vF/bOl0Gk1NTfKzpmkoLS2FdeQIxJQpEP/+d55WZz6IK6+EePllqLWSRigUQrdu3fLk1HoNw0AikZBrsG0b9fX1+TE9e/bMf25sbEQ2m5V/e/mrq6uDKKaAtNGjoVVVdUYuXz8zfDiwefP/BdSuFIstoKNHj+ZVjNuU25Vax21HdWPzbsdUKoVMJiP7uYW7d+/eSq2i0ShisZjsdxwHcqumUkg8+CD0PXsgPvkE4tixgnaS27cv3D59YF1yCRoXLZIqxbWycZ1UOdXUetnP9XINuq6jpKSklRqyg2pIc0K+OV6pG02DKJaApD06cgTdIhGIu++G9pe/FCQg87e/hV1Whoymwendu3gCamxszO8gGjPVKE1lv9vrV0aYv73jaeT5oxq/QdVIKzRxIvT16wsT0IwZsObNa3NNaqd652hv7rb6vettyXfRjLRa2LFjx9B98uTCBTR9OuwFC0D1ZfvenWJ+BWRNmwZr4cLiC6iYNohCouFzx46FvmlTYSr20EMwZ8/OHxDfKyNdf+gQjJoaGFu3IpROQ9uwAeKzzwoSkHPJJbAvuwx2NAr78ssRGjoU4V69vrunGD3rTFUVIo8/DvHmmzyLIdJpWvOCBNNqMI0/PflwGM4558B6+GG4w4bJ47lLjvkgPekYT8GdO+Fu3gxRWQlx+LA/YXTwafeUU+BOmgQxZAgwdCiO5cKcb0+oYVmIb9qE8J/+1Lxb6uo6yFrAw2IxuIMHI33ttUjfdhuMaNR/LOY4Tn7P03tmAEq/gJ6w8lTpCavGfhXE2pkMslu2IDpzJrS9ewPm1h8559xzYS5eDGPoUGjhsAxWvd42DwvVqJ4qavDyx0C805608fHHCK9YgfDq1QWHDf5YL+DpaBSZ22+HOXEinAsvLF6ooVVWIlJeDrF/fwGrPXlD3dNOg/W738GYOTPv4XtTOCeMxZLJZF7FGNBRvehuMzxQoQaNHRsDTH3xYugVFUBj48njuDMzGwacm2+G9dRTcHv3hmXbeSqKb3aQbxUayf6OnmLawYOIP/AAQn/9KzNlnVnit+IZ68orkVqyBCWXXppfj/+E2eHDKL3sMmj//a9/P+ZbICY3kUD6wAHEcob6hAL6plDDePdddBs/vmg+TbHkR98p+dJLsAYN6nw+CG+/jfj06dDee69Y6y7qPM6PfoSmJ59EaMyY9hNmtbW1eSNNa06DTOOc3r8foenTEdq4saiLLvZk1rBhwLPPQh84UPLNYoPyiZgtbdtINzQgPncuwqtWFXu9J2U+86ab0LhwIdwePTpW1Qjv2IHE2LHf6dOqIEnrOjLl5UhNmtRaQJZl5VWM9Szr4EGUnnVWQfQLGkw/64c/lD/OqafCGj8e1uDBELW1iF9/vSTlbNuG5Omng19UaOVK6B9/DBw6BPHFF116ita/9x4iZ58tzQwbQ5DjQo1YJILwjTdC27q1IJ47NFgIOMznjBkD/YorYJ1/PtKAjJHYetTWwr3zTkAIJJ9+Gna/frKfaRP+iLffhr1jB7S//Q36li1dIijmnJL/+AfsXEqmVVUjtn49otOmsTTaIZ47Osjt3x9N8+cjO3QoSvv2hchVOul/KAH1pE+Sm7c+m80v0luxbWLJqb4e4W3bEHvoIQj6ZUE2w0Dm3nuRmj1bUpUCUqGG09CA2LhxMHbuDG5KTUP21lthLVoEN1caVtu3vXCGk1No3oqKqqp4wwAtmYQ+YwZC69fLmltQjX5R+pVXIPr0kaqWP8Wc5cshZs2CCCqXo+tofOYZWLfdJr8J1VjVIPOMc1gHVwLzBo8s8PH/bCxSqmwhUxEtqxrhTZsQv+8+acMCafE43PnzIX7/++aCJ/NBLgEIf/gDtOeeC2QONxZDauVKmMOHS+CAqr6SOAEEFBD7ybASkDfnxIS8V0A8PJQ9UgLy5nC46xO//jXE558Hsn5z7FiIpUuRoh/EUCNUVYUEkReHDvmewO3VC3Z5OXSmQGmYc6VnRbhHjx6yn2p0nA3yoC0oRGWbvDbIW/ZuWdXIrl2LGBN3AfCAWAzJFSsQHzcO4uiRI26UxMvKfAsHug7zV7+CvXgxojmGiyWgZH094i++iMicOUAOyuKHofTjjyMyYwZE/WefufGpU6H/+c9+6MlnnYEDkdy9W6ZqVVpWAQIUcdodtbNoX1TuhWGOanT3Vb+ipf3nP8hu3470iBEQJSXy6FfqyTSxAlQkBg+GHkDs6PzsZ3A3boRwDx1y3fPPh/jqK/8Cqq6G9tOftgmg8mOkrXXrEJs9G9qXX8KuqJDq217p2dq/H0b//r55QTyOut27Ier27nVLzj3XN0Hn4ouhvfOOpNMWwqwzAuIuspctQ2jmTB5nkF/k7t2yLnai2rx1110wnn/eN0/JrVshUqtXu7GJE/0Ri0TQuHo1wqNGSTo0sNz2bFQnpW78m9gbqh37qSLKx1EqwjFUK+PoUYglS6AtWiTpuOPHI11ZmVcrRtxKDSViJIcV4lhz507ER4yQQvXTzHnzIKyRI1399df90AGTT03btyNz6qmSTlsAqkJOsfTOnQjdfz/0XbvkYwQu6AsWoIlBZY7pE9XmS5JJiFGjoL37ri++7Ouug7AHDXK16mp/hMaMgblsGZpyqLJCBSSSScQ2b4axdi1QUiJjQen46TqcyZOBRYugde8uHcaOCKiHrsOeMcO3mjn9+0M4/fq5fss3ZlkZrEceAXO9Sq2U6qiQQn0D7FeVE/7P3rMHiVGjWvtgQoB03Vmz5A5l81ZaFB2pfrkqTH4Ox4FWUYHwrFm+vni3X79gBEQQgTtgAERpKbQ1awoy0jqdsTagMAxwsWcPsoZxwlBD2SwFA3aefBLOa69By2ah1dT4ExCzCEHsIIm2YKr2ggvg7NghU5YKCKkw12ql9J7VLmM4ER0xAtobb7RmJJGAXVd3HC2vf8WTUnnbkk4OeSbGjm3GPxL256l9dUZSLqGEQQjI/OUvJQTFLC2FedVV0olTC6ZaKLA5F8lAVIUadAjDS5YgMm9eM0Oelr3/fqTmzJEnoELMUijKBtHh9KJcVUAb+uc/ET98GHZVFYzKys7IJf+M27NnQAK6+24JsEznMnGFGGknk0H3sjLo69blF+Zccw3qXnlF7sq
OxmLHwYCjUThPPAHtscf8CYg2yB4yxNW2b/dFyL7qKpgrV8LKIb645b15H6VunIQ7QoUa9JVU3ifU0AC7pgbagAGwiYnOrchLi+OVWnH3ePNEqhLBxyKNjdAYPhWIpG0pBPessyDS8+e7kVwGrdNSikSQfucdRM87T5IIypMmrW/KB7U00vy7rqYGJddfXzDMryX/zGWJzI4dbviKKzotG/Vg5rnnEJky5VshoMYNG5qrMj5bE1FyyX373MRPfuI7D02fIbtvn1yS1y+hOnmB6MqXUeBtBThXqsPn2/NxvLS8PpG3n88agwZB+PSiuY5UdTXEsX373BK61Cyt+GyZJUuQuuMO36FGZxJmykjHdu1C9LrrfHICuD/4AdyaGojagwfdBOvTS5f6J5pIyLKJceGFrS6zKOJdlVGkgLSvvkLJxRdD5O6w+WHIHj0a+sqVEOlUytU3boQxaZLv6Jc1Leuaa5BhAaBvX7m+YqmYQ5+KWILVq307iFy3+eKLsG6+ubmq4TKPc8cdEO+/70fozc8y4p42DU0PPgg3HM7fOFSEg65qNE+pI/7MMzDo9+SucPlhxGVWYs0a1F90EYRpmq5DZOucOQgtXuyH7tfPUki/+Q3Sc+dC79Yt71VzgLqqqfJEyoCrfo6hc6j66fsoH4e+lfKe6VtJw57JwFi6FLEFCwLZOZzfmjoV7hNPyKtXQsFfQv/6F2L33tuMIguiaRqsESPkVs3k7riSrLrUxlOIQlGnl/eym7faQcdShS30ibyhBukZEyZA27w5MMwk8QJNCxfCHDlSlquOhwFPmIAQJwuwEVLS9PLLyF56qVS5QIy0acLg5bwhQwJHvpnDhiH5xz9KCcjSs/dCHaqrEbvhBnYGKCJ55xHmz3+O7C23QKeHmwNpeS+vGakUuIsZtGYGD4aTK1Uf5/uYJuwtWxDesAGhTZuCx2fHYsi++iokqEr5Y8ehXOvrEZk/H6Hy8mAFlKPG6NghZnnkSGTvuguRM87I56vrq6vRfdw4idoQf/87cM458impVp9+isi6dQixZn7gALd9l6zPnjABdmUlwrnUSbvXwkuuvhp6AJ7oCbnQdemMOQx0eRc+k0GEhpZGcu5cWIYB4803ob/1FsSnnwZmgNtbE8Hmx95/vzWAquWdVbml9+6F8YtfBOJdd8lXHTBRAsvttWthsVjY4q5u23c1HAfmqlUI33OP7xgtYF66hJy7bJm8ie1qWsevhTd98YW8dhCuqAgUf9MlHHaWaCSC7KRJ0ObNg8E7Z23dm89ms3mMoirGUc1koqqhARp9o1df7RLIW2f5Cuo5d/hwOC+8gCxVLJe/pjOqoDd0Rr/xroZ8kMfza699f4REICmzhR98IGV9wqsI3h1EidHDVWXhPIiJ8LxZs6CvWUPoZ1Bf4MmhEwrBufVWmMuX58MZag43Qiu+M5mvQw2u1ou09yLOZRhA8MCKFdAefRTiyy9PDnN+Z00k4Dz8MMTUqajz3FhStwzbRNp/02UWrsn7chN71SqI8nJoe/b4XW5Rn3fPPhsuAVGTJ8ud0uELdel0Om+k2yvttkyH6vRmKypg8GJdwJDhwKUWjSJ7++3ysMl6cEPKfHC+9tK30tx09EKdWrh6Q5Nr28ju2oUQ3+Tis8QbuFBUaHPmmbCefVZe7HU07eS8gSr5+eeILl+O8MaN0IJIuAUgLefHP0Z29Gg4ZWWInnaav1d0NTQ05FVMFeNorJioUgAl9c4yGSd5inde4JLz0Uewt21D9KWXoL31VgBsFk6CN5vTU6YgOmwY7AED5PpVcbIlkMsL2FK+T0u+ZZGzUCPdkTdQhfbtQ+yRR6SgBN+CwDSo31dStCUvlrpjMdjnnYfUU0/BuuACOeo78wYq88gRaFu2wHrjDeiffCIzBBLs3Vlh0cHr0wd8aYAYOBC46CJg1CjYvXt33Su6Wl6HUg4Tt5eqryvICr8dL/yWYxUKnmOVKraCv0SjvBQiX9HVwBjvwAHoH34Io6pKXnGS15yYpPNAgTmXy1fp9O8vo2ybAunXD/FevUCwelM4DCsnaC/8RXnG/M1+8kFToFK8aiN6YcdUN4Wp9PJHbfkf65FaJpqTIWoAAAAASUVORK5CYII=", "small_icon": 
"iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAAAAXNSR0IArs4c6QAABiNJREFUWEetWFtIVF0U/s6Zm859JCV8SoKgjMS0C0SUFQVJYQSlFUlJ9eBDZBFB+Wag9FJSkRQGQuRb9hDdKI2E/JUopKwISoiIUObijHN1zv75tjq/l3Eu+u+nxTlrr7X2Wmt/a62tCCGEpmnw+XxwuVyIRqMYHx+XNJfH44HVaoXBYEjQWm8vTHv2yP8Lru/f4bHb5W/KCoVCCIfDkh4bG0M8Hpe0EAJerxdOpxMTExNQAoGAUBQFer0esVgMqqpCp9NJmouGkJEbSVOQ6O2FeffulPaEhoagW7lS8lAWZVI2aeqiTtLTuqUxigLF6/UKMprNZgQCAanUZDJJmove4eloCGmekputT59CV1OT1KjIq1cIl5VJfi7KokzKJk1d1EmaRlgsFhkVGq0sJmQUHOroQG5tbXKDnjyBae9eGeKsQ+Z2u6WHHA6HFGA0GqXFM4X5/X7pFcacp6KrbY8eQV9Xl9Sg8IsXCJWXz8rD3Nxc5OTkSLk2m02GjTQ9xPxhHslQRiIROkkq5AfSDA+9MDP+IhqF6fRpqF1dQCSSOqGn/xoM0KqqELl3j8kIXp65OTlXd9qQBd6+haW2FsrPn5kZsQCXKCzEeGcnrFu3pr5lC4ZsZASOykqoAwNLMmTuZm3dOoh//oFuKnzzQhaPx2XImCd2u13mB2+Vc/nyzEOTrcmqCt/fv7Dl5cmdxCXmFVNlPg719yN31y5A07JVkzV/uLsb2saNMncTODQzZL7hYTiKirIWvJQN/k+fYCsu/u+WJXDI7YaroAAQYinys9+rKPD++QNHQcGklxJIvW8fdG/eZCxQ27AB0Y4OhJ1OOM6cQVzTEGhvhzMYhHboENT+/sxllZYi0NMzidQej0cotHDNmowERI8fR/DGDQmkLCORSETSTEiCJmmelKXAfv481Pb2jOT6BgehKyqaLB1QlIw2BZubYWhoSFR+Ii8RmIjL0/GWkmaS8taQNj18CHN9fUbyY9EoFPfoqHAtW5Z2Q+TmTZjq6xOlY25LQa8QOvh9Ji2La08PrBUVaXWMu91Q4leuCLWpKSWztn49MDAgFdIj9ABp1j1WcdL0EKu4xLMtW6C9e4fAFHTQW1plJfTPnqXUE6urgxKrqBD67u6UjBPd3dBv3z6vWZsbMjYbKvPp8GHoOjtnFejYly8wpMnTeFkZFC0vTyhu98IG6XTwezzzqv3MkDmGhoDRUahVVdBqaqB1dCTCR8HMJVnt2UFONX7JFAqHA4owmwWCwYUNMpkQ+/oVhhUrknrIeOAAdM+fy/3RixcxfunSrKTmdxof/vEDOWvXAqHQwrpyczMwSFWhbduG8cePZT/DnpttBGkmb05hIcFMKmHTpq+ulv/JRx4uwoNlxw6o79+nBV5FOBwCPt/CVhuN8I+OLhiy6K9fsNy+DaWkBGNVVUlvWSJkTmfqgm02Q5nYvFno+vpSZ7/bDYPLlTapk+FQImQ+H3I4yaQoTdrq1VAiR48K44MHKQ2KnDqFYEvLrBY2GxySo1RjIwy3bqWGl8pKKL7Pn4W9uDgtaPm+fYNj1apFAaN3eBjODLqI8IcP2ZUO7+/fsOTnZ1U6ZMjy84F4PO2hJ0uH2y2M3d2wHDyYdgNrXuD1a8RKSuZNo8lKh21kBHqCYQbGjLe1QRw5MjWXxeNQOdSFw+mNAiBsNoz19UlsYrmInjsHPWW0tsrEN4yOwlJeDmVsLCN5MBohQiFMsIWdbtACg4Owl5ZmJmCaS1EgXC4oU2AnCGwcDrNs8gIvX8Kyc+dkgzazhY00NcHU2JidUUvkJrobW1qStLBTrx/ayZNQ799foprMtov9+4GurtmvH36/X75+cKIk3HOszmlogNrWlpnURXJp1dUI3r0rd7ONmX4JmRWymbP9WH8/7Js2LVJd6m3i40dZapLO9qleP1gkY5cvw9Tc/L8YFj17FoZr1+Cfet5J+mCVyetHgFf5wgWYOjqAiYmsjYucOAG0tsJktaZ//Zgeg/gEw/aTucS2gTQX289gMCinCtIcs03Xr0O9c4djLxT2UtNTLmHAYoGwWiGOHYNy9aqUQVlsdZkrpIldbHlJM3/5sMWJhd/+BXe3XAACKxnuAAAAAElFTkSuQmCC", "visible": true, "tab_version": "4.1.3", "tab_build_no": "0", "build_no": 15}, "data_input_builder": {"datainputs": [{"index": "default", "sourcetype": "sigsci-event", "interval": "300", "use_external_validation": true, "streaming_mode_xml": true, "name": "SigsciEvent", "title": "SigsciEvent", "description": "", "type": "customized", "parameters": [{"name": "site_api_name", "label": "Site API Name", "help_string": "This is the Site API Name. It should not be a URL.", "required": true, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": "jeremycx"}], "data_inputs_options": [{"type": "customized_var", "name": "site_api_name", "title": "Site API Name", "description": "This is the Site API Name. 
It should not be a URL.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}], "code": "# encoding = utf-8\nfrom timeit import default_timer as timer\nimport requests\nimport json\nfrom datetime import datetime\nfrom sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time\n\n\"\"\"\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n\"\"\"\n\n\n# def use_single_instance_mode():\n# return True\n\n\ndef validate_input(helper, definition):\n # This example accesses the modular input variable\n site_name = definition.parameters.get(\"site_api_name\", None)\n if site_name is None or site_name == \"\":\n msg = \"The site_name can not be empty\"\n raise ValueError(\"InvalidSiteName\", msg)\n elif \"http\" in site_name:\n msg = (\n \"The site name is not the full URL it should be the \",\n \"API Name of the site like 'my_example_site'\",\n )\n raise ValueError(\"InvalidSiteName\", msg)\n elif \" \" in site_name:\n msg = (\n \"The site name should be the API Name of the site like \",\n \"not the Display Name. Example would be 'my_site_name' instead of \",\n \"My Site Name\",\n )\n raise ValueError(\"InvalidSiteName\", msg)\n pass\n\n\ndef collect_events(helper, ew):\n start = timer()\n loglevel = helper.get_log_level()\n # Proxy setting configuration\n # proxy_settings = helper.get_proxy()\n global_email = helper.get_global_setting(\"email\")\n global_api_token = helper.get_global_setting(\"api_token\")\n global_corp_api_name = helper.get_global_setting(\"corp_api_name\")\n api_host = \"https://dashboard.signalsciences.net\"\n helper.log_info(\"email: %s\" % global_email)\n helper.log_info(\"corp: %s\" % global_corp_api_name)\n\n def pull_events(current_site, delta, key=None):\n site_name = current_site\n last_name = f\"events_last_until_time_{current_site}\"\n last_run_until = helper.get_check_point(last_name)\n helper.log_info(f\"last_run_until: {last_run_until}\")\n if last_run_until is None:\n (until_time, from_time) = get_from_and_until_times(\n delta, five_min_offset=False\n )\n else:\n (until_time, from_time) = get_until_time(\n helper, last_run_until, delta, five_min_offset=False\n )\n if from_time is None or from_time > until_time:\n helper.log_info(f\"{from_time} >= current now time, skipping run\")\n return\n if from_time >= until_time:\n helper.save_check_point(last_name, from_time)\n helper.log_info(\n f\"from_time {from_time} >= until_time {until_time}, skipping run\"\n )\n return\n helper.save_check_point(last_name, until_time)\n helper.log_info(\"SiteName: %s\" % site_name)\n\n helper.log_info(f\"Start Period: {datetime.fromtimestamp(from_time)}\")\n helper.log_info(f\"End Period: {datetime.fromtimestamp(until_time)}\")\n\n input_name = helper.get_input_stanza_names()\n single_name = \"\"\n\n if type(input_name) is dict and input_name > 1:\n helper.log_info(\"Multi instance mode\")\n for current_name in input_name:\n single_name = current_name\n else:\n helper.log_info(\"Single instance mode\")\n helper.log_info(\"Inputs: %s\" % input_name)\n helper.log_info(\"Inputs Num: %s\" % len(input_name))\n single_name = input_name\n helper.log_info(f\"single_name: {single_name}\")\n\n # Loop across all the data and output it in one big JSON object\n url = (\n f\"{api_host}/api/v0/corps/{global_corp_api_name}\"\n f\"/sites/{site_name}/activity?\"\n 
f\"from={from_time}&until={until_time}\"\n )\n helper.log_info(\"Pulling results from Events API\")\n config = Config(\n url=url,\n api_host=api_host,\n from_time=from_time,\n until_time=until_time,\n global_email=global_email,\n global_corp_api_name=global_corp_api_name,\n current_site=current_site,\n )\n config.headers = {\n \"Content-type\": \"application/json\",\n \"x-api-user\": global_email,\n \"x-api-token\": global_api_token,\n \"User-Agent\": config.user_agent_string,\n }\n all_events = get_results(\"Events\", helper, config)\n total_requests = len(all_events)\n helper.log_info(\"Total Events Pulled: %s\" % total_requests)\n write_start = timer()\n for current_event in all_events:\n helper.log_debug(current_event)\n if key is None:\n source_type = helper.get_sourcetype()\n helper.log_info(\"Concurrent Mode\")\n source_type_info = type(source_type)\n active_index = helper.get_output_index()\n index_info = type(active_index)\n single_name_info = type(single_name)\n current_event_info = type(current_event)\n helper.log_info(f\"source_type: {source_type}\")\n helper.log_info(f\"source_type_info: {source_type_info}\")\n helper.log_info(f\"index: {active_index}\")\n helper.log_info(f\"index_info: {index_info}\")\n helper.log_info(f\"single_name: {single_name}\")\n helper.log_info(f\"single_name_info: {single_name_info}\")\n helper.log_info(f\"current_event: {current_event}\")\n helper.log_info(f\"current_event_info: {current_event_info}\")\n event = helper.new_event(\n source=single_name,\n index=helper.get_output_index(),\n sourcetype=source_type,\n data=current_event,\n )\n else:\n indexes = helper.get_output_index()\n current_index = indexes[key]\n types = helper.get_sourcetype()\n source_type = types[key]\n single_name = single_name[0]\n helper.log_info(\"Sequential Mode\")\n helper.log_info(f\"source_type: {source_type}\")\n helper.log_info(f\"index: {current_index}\")\n helper.log_info(f\"single_name: {single_name}\")\n helper.log_info(f\"current_event: {current_event}\")\n event = helper.new_event(\n source=single_name,\n index=current_index,\n sourcetype=source_type,\n data=current_event,\n )\n\n try:\n ew.write_event(event)\n except Exception as e:\n raise e\n write_end = timer()\n write_time = write_end - write_start\n write_time_result = round(write_time, 2)\n helper.log_info(\"Total Event Output Time: %s seconds\" % write_time_result)\n\n # If multiple inputs configured it creates an array of values and the\n # script only gets called once per Input configuration\n\n all_sites = helper.get_arg(\"site_api_name\")\n time_deltas = helper.get_arg(\"interval\")\n helper.log_info(f\"interval: {time_deltas}\")\n if type(all_sites) is dict:\n helper.log_info(\"run_type: Sequential\")\n for active_input in all_sites:\n site = all_sites[active_input]\n current_delta = int(time_deltas[active_input])\n helper.log_info(\"site: %s\" % site)\n pull_events(key=active_input, current_site=site, delta=current_delta)\n helper.log_info(\"Finished Pulling Events for %s\" % site)\n else:\n helper.log_info(\"Run Type: Concurrent\")\n site = helper.get_arg(\"site_api_name\")\n helper.log_info(\"site: %s\" % site)\n pull_events(current_site=site, delta=int(time_deltas))\n helper.log_info(\"Finished Pulling Events for %s\" % site)\n end = timer()\n total_time = end - start\n time_result = round(total_time, 2)\n helper.log_info(\"Total Script Time: %s seconds\" % time_result)\n", "customized_options": [{"name": "site_api_name", "value": "yoursite"}], "uuid": "294ad5bbdf92407b9a6785b46106152a", 
"sample_count": 0}, {"index": "default", "sourcetype": "sigsci-requests", "interval": "300", "use_external_validation": true, "streaming_mode_xml": true, "name": "SigsciRequests", "title": "SigsciRequests", "description": "", "type": "customized", "parameters": [{"name": "site_api_name", "label": "Site API Name", "help_string": "This is the API Name of the site to pull request data from. This should not be a URL.", "required": true, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": "jeremycx"}], "data_inputs_options": [{"type": "customized_var", "name": "site_api_name", "title": "Site API Name", "description": "This is the API Name of the site to pull request data from. This should not be a URL.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}], "customized_options": [{"name": "site_api_name", "value": "yoursite"}], "code": "# encoding = utf-8\nfrom timeit import default_timer as timer\nfrom datetime import datetime, timezone, timedelta\nfrom sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time\n\n\"\"\"\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n\"\"\"\n\n# def use_single_instance_mode():\n# return True\n\n\ndef validate_input(helper, definition):\n # This example accesses the modular input variable\n site_name = definition.parameters.get(\"site_api_name\", None)\n if site_name is None or site_name == \"\":\n msg = \"The site_name can not be empty\"\n raise ValueError(\"InvalidSiteName\", msg)\n elif \"http\" in site_name:\n msg = (\n \"The site name is not the full URL it should be the \",\n \"API Name of the site like 'my_example_site'\",\n )\n raise ValueError(\"InvalidSiteName\", msg)\n elif \" \" in site_name:\n msg = (\n \"The site name should be the API Name of the site like \",\n \"not the Display Name. 
Example would be 'my_site_name' instead of \",\n \"My Site Name\",\n )\n raise ValueError(\"InvalidSiteName\", msg)\n pass\n\n\ndef collect_events(helper, ew):\n start = timer()\n loglevel = helper.get_log_level()\n helper.set_log_level(loglevel)\n # Proxy setting configuration\n # proxy_settings = helper.get_proxy()\n global_email = helper.get_global_setting(\"email\")\n global_api_token = helper.get_global_setting(\"api_token\")\n global_corp_api_name = helper.get_global_setting(\"corp_api_name\")\n api_host = \"https://dashboard.signalsciences.net\"\n helper.log_info(\"email: %s\" % global_email)\n helper.log_info(\"corp: %s\" % global_corp_api_name)\n\n def pull_requests(helper, current_site, delta, key=None):\n site_name = current_site\n last_name = f\"requests_last_until_time_{current_site}\"\n last_run_until = helper.get_check_point(last_name)\n\n if last_run_until is None:\n helper.log_info(\"no last_run_time found in checkpoint state\")\n helper.log_debug(\"get_from_until\")\n until_time, from_time = get_from_and_until_times(\n delta, five_min_offset=True\n )\n else:\n helper.log_info(f\"last_run_until found in state: {last_run_until}\")\n helper.log_debug(\"get_until\")\n until_time, from_time = get_until_time(\n helper, last_run_until, delta, five_min_offset=True\n )\n\n if from_time is None:\n helper.log_info(f\"{last_run_until} >= current now time, skipping run\")\n return\n\n if from_time >= until_time:\n helper.save_check_point(last_name, from_time)\n helper.log_info(\n f\"from_time {from_time} >= until_time {until_time}, skipping run\"\n )\n return\n helper.log_info(\"SiteName: %s\" % site_name)\n helper.log_info(f\"Start Period: {datetime.utcfromtimestamp(from_time)} UTC\")\n helper.log_info(f\"End Period: {datetime.utcfromtimestamp(until_time)} UTC\")\n\n input_name = helper.get_input_stanza_names()\n single_name = \"\"\n\n if type(input_name) is dict and input_name > 1:\n helper.log_info(\"Multi instance mode\")\n for current_name in input_name:\n single_name = current_name\n else:\n helper.log_info(\"Single instance mode\")\n helper.log_info(\"Inputs: %s\" % input_name)\n helper.log_info(\"Inputs Num: %s\" % len(input_name))\n single_name = input_name\n helper.log_info(f\"single_name: {single_name}\")\n\n # Loop across all the data and output it in one big JSON object\n url = (\n f\"{api_host}/api/v0/corps/{global_corp_api_name}\"\n f\"/sites/{site_name}/feed/requests?\"\n f\"from={from_time}&until={until_time}\"\n )\n helper.log_info(\"Pulling requests from requests API\")\n config = Config(\n url=url,\n api_host=api_host,\n from_time=from_time,\n until_time=until_time,\n global_email=global_email,\n global_corp_api_name=global_corp_api_name,\n current_site=current_site,\n )\n config.headers = {\n \"Content-type\": \"application/json\",\n \"x-api-user\": global_email,\n \"x-api-token\": global_api_token,\n \"User-Agent\": config.user_agent_string,\n }\n\n all_requests = get_results(\"Requests\", helper, config)\n\n total_requests = len(all_requests)\n helper.log_info(\"Total Requests Pulled: %s\" % total_requests)\n if total_requests == 0:\n helper.save_check_point(last_name, until_time)\n helper.log_info(\n f\"No events to write, saving checkpoint to value:{until_time}\"\n )\n write_start = timer()\n for current_event in all_requests:\n if key is None:\n source_type = helper.get_sourcetype()\n event = helper.new_event(\n source=single_name,\n index=helper.get_output_index(),\n sourcetype=source_type,\n data=current_event,\n )\n else:\n indexes = 
helper.get_output_index()\n current_index = indexes[key]\n types = helper.get_sourcetype()\n source_type = types[key]\n single_name = single_name[0]\n event = helper.new_event(\n source=single_name,\n index=current_index,\n sourcetype=source_type,\n data=current_event,\n )\n\n try:\n ew.write_event(event)\n helper.save_check_point(last_name, until_time)\n helper.log_info(f\"Event written, saving checkpoint:{until_time}\")\n except Exception as e:\n helper.log_error(f\"error writing event: {e}\")\n helper.log_error(event)\n raise e\n\n write_end = timer()\n write_time = write_end - write_start\n write_time_result = round(write_time, 2)\n helper.log_info(\"Total Event Output Time: %s seconds\" % write_time_result)\n\n # If multiple inputs configured it creates an array of values and the\n # script only gets called once per Input configuration\n all_sites = helper.get_arg(\"site_api_name\")\n time_deltas = helper.get_arg(\"interval\")\n helper.log_info(f\"interval: {time_deltas}\")\n\n if type(all_sites) is dict:\n helper.log_info(\"run_type: Sequential\")\n for active_input, site in all_sites.items():\n time_delta = int(time_deltas[active_input])\n helper.log_info(\"site: %s\" % site)\n pull_requests(helper, key=active_input, current_site=site, delta=time_delta)\n helper.log_info(\"Finished Pulling Requests for %s\" % site)\n else:\n helper.log_info(\"Run Type: Concurrent\")\n site = helper.get_arg(\"site_api_name\")\n helper.log_info(\"site: %s\" % site)\n pull_requests(helper, current_site=site, delta=int(time_deltas))\n helper.log_info(\"Finished Pulling Requests for %s\" % site)\n end = timer()\n total_time = end - start\n time_result = round(total_time, 2)\n helper.log_info(\"Total Script Time: %s seconds\" % time_result)\n", "uuid": "aaaeb391da9043e1819408033d9db708", "sample_count": 0}, {"index": "default", "sourcetype": "sigsci-activity", "interval": "300", "use_external_validation": true, "streaming_mode_xml": true, "name": "SigsciActivity", "title": "SigsciActivity", "description": "", "type": "customized", "parameters": [{"name": "place_holder", "label": "Place Holder", "help_string": "It was required to have one option even if it isn't needed. You can skip this one.", "required": false, "format_type": "text", "default_value": "", "placeholder": "Not needed", "type": "text", "value": ""}], "data_inputs_options": [{"type": "customized_var", "name": "place_holder", "title": "Place Holder", "description": "It was required to have one option even if it isn't needed. 
You can skip this one.", "required_on_edit": false, "required_on_create": false, "format_type": "text", "default_value": "", "placeholder": "Not needed"}], "code": "# encoding = utf-8\nfrom timeit import default_timer as timer\nimport json\nfrom datetime import datetime\nfrom sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time\n\n\"\"\"\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n\"\"\"\n\n\n# def use_single_instance_mode():\n# return True\n\n\ndef validate_input(helper, definition):\n # This example accesses the modular input variable\n pass\n\n\ndef collect_events(helper, ew):\n start = timer()\n loglevel = helper.get_log_level()\n helper.set_log_level(loglevel)\n # Proxy setting configuration\n # proxy_settings = helper.get_proxy()\n global_email = helper.get_global_setting(\"email\")\n global_api_token = helper.get_global_setting(\"api_token\")\n global_corp_api_name = helper.get_global_setting(\"corp_api_name\")\n api_host = \"https://dashboard.signalsciences.net\"\n helper.log_info(\"email: %s\" % global_email)\n helper.log_info(\"corp: %s\" % global_corp_api_name)\n\n def pull_events(delta, key=None):\n last_run_until = helper.get_check_point(\"activity_last_until_time\")\n helper.log_info(f\"last_run_until: {last_run_until}\")\n if last_run_until is None:\n (until_time, from_time) = get_from_and_until_times(\n delta, five_min_offset=False\n )\n else:\n (until_time, from_time) = get_until_time(\n helper, last_run_until, delta, five_min_offset=False\n )\n if from_time is None:\n helper.log_info(f\"{last_run_until} >= current now time, skipping run\")\n return\n if from_time >= until_time:\n helper.save_check_point(\"activity_last_until_time\", from_time)\n helper.log_info(\n f\"from_time {from_time} >= until_time {until_time}, skipping run\"\n )\n return\n helper.save_check_point(\"activity_last_until_time\", until_time)\n\n helper.log_info(f\"Start Period: {datetime.fromtimestamp(from_time)}\")\n helper.log_info(f\"End Period: {datetime.fromtimestamp(until_time)}\")\n\n input_name = helper.get_input_stanza_names()\n single_name = \"\"\n\n if type(input_name) is dict and input_name > 1:\n helper.log_info(\"Multi instance mode\")\n for current_name in input_name:\n single_name = current_name\n else:\n helper.log_info(\"Single instance mode\")\n helper.log_info(\"Inputs: %s\" % input_name)\n helper.log_info(\"Inputs Num: %s\" % len(input_name))\n single_name = input_name\n helper.log_info(f\"single_name: {single_name}\")\n\n # Loop across all the data and output it in one big JSON object\n url = (\n f\"{api_host}/api/v0/corps/{global_corp_api_name}\"\n f\"/activity?\"\n f\"from={from_time}&until={until_time}\"\n )\n config = Config(\n url=url,\n api_host=api_host,\n from_time=from_time,\n until_time=until_time,\n global_email=global_email,\n global_corp_api_name=global_corp_api_name,\n current_site=\"\",\n )\n config.headers = {\n \"Content-type\": \"application/json\",\n \"x-api-user\": global_email,\n \"x-api-token\": global_api_token,\n \"User-Agent\": config.user_agent_string,\n }\n helper.log_info(\"Pulling results from Corp Activity API\")\n all_events = get_results(\"Activity Events\", helper, config)\n total_requests = len(all_events)\n helper.log_info(\"Total Corp Activity Pulled: %s\" % total_requests)\n write_start = timer()\n for current_event in all_events:\n if key is None:\n source_type = 
helper.get_sourcetype()\n helper.log_info(\"Concurrent Mode\")\n source_type_info = type(source_type)\n active_index = helper.get_output_index()\n index_info = type(active_index)\n single_name_info = type(single_name)\n current_event_info = type(current_event)\n helper.log_info(f\"source_type: {source_type}\")\n helper.log_info(f\"source_type_info: {source_type_info}\")\n helper.log_info(f\"index: {active_index}\")\n helper.log_info(f\"index_info: {index_info}\")\n helper.log_info(f\"single_name: {single_name}\")\n helper.log_info(f\"single_name_info: {single_name_info}\")\n helper.log_info(f\"current_event: {current_event}\")\n helper.log_info(f\"current_event_info: {current_event_info}\")\n event = helper.new_event(\n source=single_name,\n index=helper.get_output_index(),\n sourcetype=source_type,\n data=current_event,\n )\n else:\n indexes = helper.get_output_index()\n current_index = indexes[key]\n types = helper.get_sourcetype()\n source_type = types[key]\n single_name = single_name[0]\n helper.log_info(\"Sequential Mode\")\n helper.log_info(f\"source_type: {source_type}\")\n helper.log_info(f\"index: {current_index}\")\n helper.log_info(f\"single_name: {single_name}\")\n helper.log_info(f\"current_event: {current_event}\")\n event = helper.new_event(\n source=single_name,\n index=current_index,\n sourcetype=source_type,\n data=current_event,\n )\n\n try:\n ew.write_event(event)\n except Exception as e:\n raise e\n write_end = timer()\n write_time = write_end - write_start\n write_time_result = round(write_time, 2)\n helper.log_info(f\"Total Corp Activity Output Time: {write_time_result} seconds\")\n\n # If multiple inputs configured it creates an array of values and the\n # script only gets called once per Input configuration\n time_deltas = helper.get_arg(\"interval\")\n helper.log_info(f\"interval: {time_deltas}\")\n if type(time_deltas) is dict:\n helper.log_info(\"run_type: Sequential\")\n for active_input in time_deltas:\n time_delta = time_deltas[active_input]\n time_delta = int(time_delta)\n helper.log_info(\"time_delta: %s\" % time_delta)\n pull_events(delta=time_delta, key=active_input)\n else:\n helper.log_info(\"Run Type: Concurrent\")\n helper.log_info(\"time_delta: %s\" % time_deltas)\n pull_events(delta=int(time_deltas))\n helper.log_info(\"Finished Pulling Corp Activity\")\n end = timer()\n total_time = end - start\n time_result = round(total_time, 2)\n helper.log_info(f\"Total Script Time: {time_result} seconds\")\n", "customized_options": [{"name": "place_holder", "value": ""}], "uuid": "07b37d1943e942cf831c8ee85ffcb4a2", "sample_count": 0}]}, "field_extraction_builder": {"sigsci-event": {"is_parsed": true, "data_format": "json"}, "sigsci-activity": {"is_parsed": true, "data_format": "json"}, "sigsci-requests": {"is_parsed": true, "data_format": "json"}}, "global_settings_builder": {"global_settings": {"proxy_settings": {"proxy_type": "http"}, "log_settings": {"log_level": "DEBUG"}, "customized_settings": [{"required": true, "name": "email", "label": "E-mail", "default_value": "", "placeholder": "example@example.com", "help_string": "This it the e-mail username of the user who has the correct permissions for the app to pull the data.", "type": "text", "format_type": "text", "value": "youremail@yourdomain.com"}, {"required": true, "name": "corp_api_name", "label": "Corp", "default_value": "", "placeholder": "", "help_string": "This is the API name of your corp.", "type": "text", "format_type": "text", "value": "corpname"}, {"required": true, "name": "api_token", 
"label": "API Token", "placeholder": "", "default_value": "", "help_string": "This is the API Token of the user who has the correct permissions. The API Token is tied to the username.", "type": "password", "format_type": "password", "value": ""}]}}, "sourcetype_builder": {"sigsci-requests": {"metadata": {"event_count": 0, "data_input_name": "SigsciRequests", "extractions_count": 0, "cims_count": 0}}, "sigsci-event": {"metadata": {"event_count": 0, "data_input_name": "SigsciEvent", "extractions_count": 0, "cims_count": 0}}, "sigsci-activity": {"metadata": {"event_count": 0, "data_input_name": "SigsciActivity", "extractions_count": 0, "cims_count": 0}}}, "validation": {"validators": ["best_practice_validation", "data_model_mapping_validation", "field_extract_validation", "app_cert_validation"], "status": "job_started", "validation_id": "v_1697131153_85"}} \ No newline at end of file +{"basic_builder": {"appname": "sigsci_TA_for_splunk", "friendly_name": "Signal Sciences WAF TA", "version": "1.0.38", "author": "Fastly", "description": "For users of Fastly who would like to enrich their Splunk data with information from Fastly. This app with simple configuration enabled the importing of Events, Activity, and raw request information to Splunk.\n\nThis is an open source project, no support provided, public repository is available and installation documentation can be found at https://github.com/fastly/sigsci-splunk-app. The best way to report issues with the app is to create an Issue on the github page so that it can be tracked.", "theme": "#ea3f23", "large_icon": "iVBORw0KGgoAAAANSUhEUgAAAEgAAABICAYAAABV7bNHAAAAAXNSR0IArs4c6QAAD+5JREFUeF7tXAuMVNUZ/s69d96wCwULGoWGiFofNT6KsVEEhYqiEJAKiq0ixSpLa1EKKqAGBXV5RAlugkYXUFCK5dHEtKXl0QqrrNhoVojBRxsUUEGW3Z2dnZn7ar6zc8bLPnBn791BTU+y2dmz5/7n/P+c/z//4ztXuK7rIteSySRM04QQAvF4HOFwWP6ntrZWDZH9kUhE/m1ZFhoaGuRn9vF/bOl0Gk1NTfKzpmkoLS2FdeQIxJQpEP/+d55WZz6IK6+EePllqLWSRigUQrdu3fLk1HoNw0AikZBrsG0b9fX1+TE9e/bMf25sbEQ2m5V/e/mrq6uDKKaAtNGjoVVVdUYuXz8zfDiwefP/BdSuFIstoKNHj+ZVjNuU25Vax21HdWPzbsdUKoVMJiP7uYW7d+/eSq2i0ShisZjsdxwHcqumUkg8+CD0PXsgPvkE4tixgnaS27cv3D59YF1yCRoXLZIqxbWycZ1UOdXUetnP9XINuq6jpKSklRqyg2pIc0K+OV6pG02DKJaApD06cgTdIhGIu++G9pe/FCQg87e/hV1Whoymwendu3gCamxszO8gGjPVKE1lv9vrV0aYv73jaeT5oxq/QdVIKzRxIvT16wsT0IwZsObNa3NNaqd652hv7rb6vettyXfRjLRa2LFjx9B98uTCBTR9OuwFC0D1ZfvenWJ+BWRNmwZr4cLiC6iYNohCouFzx46FvmlTYSr20EMwZ8/OHxDfKyNdf+gQjJoaGFu3IpROQ9uwAeKzzwoSkHPJJbAvuwx2NAr78ssRGjoU4V69vrunGD3rTFUVIo8/DvHmmzyLIdJpWvOCBNNqMI0/PflwGM4558B6+GG4w4bJ47lLjvkgPekYT8GdO+Fu3gxRWQlx+LA/YXTwafeUU+BOmgQxZAgwdCiO5cKcb0+oYVmIb9qE8J/+1Lxb6uo6yFrAw2IxuIMHI33ttUjfdhuMaNR/LOY4Tn7P03tmAEq/gJ6w8lTpCavGfhXE2pkMslu2IDpzJrS9ewPm1h8559xzYS5eDGPoUGjhsAxWvd42DwvVqJ4qavDyx0C805608fHHCK9YgfDq1QWHDf5YL+DpaBSZ22+HOXEinAsvLF6ooVVWIlJeDrF/fwGrPXlD3dNOg/W738GYOTPv4XtTOCeMxZLJZF7FGNBRvehuMzxQoQaNHRsDTH3xYugVFUBj48njuDMzGwacm2+G9dRTcHv3hmXbeSqKb3aQbxUayf6OnmLawYOIP/AAQn/9KzNlnVnit+IZ68orkVqyBCWXXppfj/+E2eHDKL3sMmj//a9/P+ZbICY3kUD6wAHEcob6hAL6plDDePdddBs/vmg+TbHkR98p+dJLsAYN6nw+CG+/jfj06dDee69Y6y7qPM6PfoSmJ59EaMyY9hNmtbW1eSNNa06DTOOc3r8foenTEdq4saiLLvZk1rBhwLPPQh84UPLNYoPyiZgtbdtINzQgPncuwqtWFXu9J2U+86ab0LhwIdwePTpW1Qjv2IHE2LHf6dOqIEnrOjLl5UhNmtRaQJZl5VWM9Szr4EGUnnVWQfQLGkw/64c/lD/OqafCGj8e1uDBELW1iF9/vSTlbNuG5Omng19UaOVK6B9/DBw6BPHFF116ita/9x4iZ58tzQwbQ5DjQo1YJILwjTdC27q1IJ47NFgIOMznjBkD/YorYJ1/PtKAjJHYetTWwr3zTkAIJJ9+Gna/frKfaRP+iLffhr1jB7S//Q36li1dIijmnJL/+AfsXEqmVVUjtn49otOmsTTaIZ47Osjt3x9N8+cjO3QoSvv2hchVOul/KAH1pE+Sm7c+m80v0luxbWLJqb4e4W3bEHvoIQj6ZUE2w0Dm3nuRmj1bUpUCUqGG09CA2LhxMHbuDG5KTUP21lthLVoEN
1caVtu3vXCGk1No3oqKqqp4wwAtmYQ+YwZC69fLmltQjX5R+pVXIPr0kaqWP8Wc5cshZs2CCCqXo+tofOYZWLfdJr8J1VjVIPOMc1gHVwLzBo8s8PH/bCxSqmwhUxEtqxrhTZsQv+8+acMCafE43PnzIX7/++aCJ/NBLgEIf/gDtOeeC2QONxZDauVKmMOHS+CAqr6SOAEEFBD7ybASkDfnxIS8V0A8PJQ9UgLy5nC46xO//jXE558Hsn5z7FiIpUuRoh/EUCNUVYUEkReHDvmewO3VC3Z5OXSmQGmYc6VnRbhHjx6yn2p0nA3yoC0oRGWbvDbIW/ZuWdXIrl2LGBN3AfCAWAzJFSsQHzcO4uiRI26UxMvKfAsHug7zV7+CvXgxojmGiyWgZH094i++iMicOUAOyuKHofTjjyMyYwZE/WefufGpU6H/+c9+6MlnnYEDkdy9W6ZqVVpWAQIUcdodtbNoX1TuhWGOanT3Vb+ipf3nP8hu3470iBEQJSXy6FfqyTSxAlQkBg+GHkDs6PzsZ3A3boRwDx1y3fPPh/jqK/8Cqq6G9tOftgmg8mOkrXXrEJs9G9qXX8KuqJDq217p2dq/H0b//r55QTyOut27Ier27nVLzj3XN0Hn4ouhvfOOpNMWwqwzAuIuspctQ2jmTB5nkF/k7t2yLnai2rx1110wnn/eN0/JrVshUqtXu7GJE/0Ri0TQuHo1wqNGSTo0sNz2bFQnpW78m9gbqh37qSLKx1EqwjFUK+PoUYglS6AtWiTpuOPHI11ZmVcrRtxKDSViJIcV4lhz507ER4yQQvXTzHnzIKyRI1399df90AGTT03btyNz6qmSTlsAqkJOsfTOnQjdfz/0XbvkYwQu6AsWoIlBZY7pE9XmS5JJiFGjoL37ri++7Ouug7AHDXK16mp/hMaMgblsGZpyqLJCBSSSScQ2b4axdi1QUiJjQen46TqcyZOBRYugde8uHcaOCKiHrsOeMcO3mjn9+0M4/fq5fss3ZlkZrEceAXO9Sq2U6qiQQn0D7FeVE/7P3rMHiVGjWvtgQoB03Vmz5A5l81ZaFB2pfrkqTH4Ox4FWUYHwrFm+vni3X79gBEQQgTtgAERpKbQ1awoy0jqdsTagMAxwsWcPsoZxwlBD2SwFA3aefBLOa69By2ah1dT4ExCzCEHsIIm2YKr2ggvg7NghU5YKCKkw12ql9J7VLmM4ER0xAtobb7RmJJGAXVd3HC2vf8WTUnnbkk4OeSbGjm3GPxL256l9dUZSLqGEQQjI/OUvJQTFLC2FedVV0olTC6ZaKLA5F8lAVIUadAjDS5YgMm9eM0Oelr3/fqTmzJEnoELMUijKBtHh9KJcVUAb+uc/ET98GHZVFYzKys7IJf+M27NnQAK6+24JsEznMnGFGGknk0H3sjLo69blF+Zccw3qXnlF7sqOxmLHwYCjUThPPAHtscf8CYg2yB4yxNW2b/dFyL7qKpgrV8LKIb645b15H6VunIQ7QoUa9JVU3ifU0AC7pgbagAGwiYnOrchLi+OVWnH3ePNEqhLBxyKNjdAYPhWIpG0pBPessyDS8+e7kVwGrdNSikSQfucdRM87T5IIypMmrW/KB7U00vy7rqYGJddfXzDMryX/zGWJzI4dbviKKzotG/Vg5rnnEJky5VshoMYNG5qrMj5bE1FyyX373MRPfuI7D02fIbtvn1yS1y+hOnmB6MqXUeBtBThXqsPn2/NxvLS8PpG3n88agwZB+PSiuY5UdTXEsX373BK61Cyt+GyZJUuQuuMO36FGZxJmykjHdu1C9LrrfHICuD/4AdyaGojagwfdBOvTS5f6J5pIyLKJceGFrS6zKOJdlVGkgLSvvkLJxRdD5O6w+WHIHj0a+sqVEOlUytU3boQxaZLv6Jc1Leuaa5BhAaBvX7m+YqmYQ5+KWILVq307iFy3+eKLsG6+ubmq4TKPc8cdEO+/70fozc8y4p42DU0PPgg3HM7fOFSEg65qNE+pI/7MMzDo9+SucPlhxGVWYs0a1F90EYRpmq5DZOucOQgtXuyH7tfPUki/+Q3Sc+dC79Yt71VzgLqqqfJEyoCrfo6hc6j66fsoH4e+lfKe6VtJw57JwFi6FLEFCwLZOZzfmjoV7hNPyKtXQsFfQv/6F2L33tuMIguiaRqsESPkVs3k7riSrLrUxlOIQlGnl/eym7faQcdShS30ibyhBukZEyZA27w5MMwk8QJNCxfCHDlSlquOhwFPmIAQJwuwEVLS9PLLyF56qVS5QIy0acLg5bwhQwJHvpnDhiH5xz9KCcjSs/dCHaqrEbvhBnYGKCJ55xHmz3+O7C23QKeHmwNpeS+vGakUuIsZtGYGD4aTK1Uf5/uYJuwtWxDesAGhTZuCx2fHYsi++iokqEr5Y8ehXOvrEZk/H6Hy8mAFlKPG6NghZnnkSGTvuguRM87I56vrq6vRfdw4idoQf/87cM458impVp9+isi6dQixZn7gALd9l6zPnjABdmUlwrnUSbvXwkuuvhp6AJ7oCbnQdemMOQx0eRc+k0GEhpZGcu5cWIYB4803ob/1FsSnnwZmgNtbE8Hmx95/vzWAquWdVbml9+6F8YtfBOJdd8lXHTBRAsvttWthsVjY4q5u23c1HAfmqlUI33OP7xgtYF66hJy7bJm8ie1qWsevhTd98YW8dhCuqAgUf9MlHHaWaCSC7KRJ0ObNg8E7Z23dm89ms3mMoirGUc1koqqhARp9o1df7RLIW2f5Cuo5d/hwOC+8gCxVLJe/pjOqoDd0Rr/xroZ8kMfza699f4REICmzhR98IGV9wqsI3h1EidHDVWXhPIiJ8LxZs6CvWUPoZ1Bf4MmhEwrBufVWmMuX58MZag43Qiu+M5mvQw2u1ou09yLOZRhA8MCKFdAefRTiyy9PDnN+Z00k4Dz8MMTUqajz3FhStwzbRNp/02UWrsn7chN71SqI8nJoe/b4XW5Rn3fPPhsuAVGTJ8ud0uELdel0Om+k2yvttkyH6vRmKypg8GJdwJDhwKUWjSJ7++3ysMl6cEPKfHC+9tK30tx09EKdWrh6Q5Nr28ju2oUQ3+Tis8QbuFBUaHPmmbCefVZe7HU07eS8gSr5+eeILl+O8MaN0IJIuAUgLefHP0Z29Gg4ZWWInnaav1d0NTQ05FVMFeNorJioUgAl9c4yGSd5inde4JLz0Uewt21D9KWXoL31VgBsFk6CN5vTU6YgOmwY7AED5PpVcbIlkMsL2FK+T0u+ZZGzUCPdkTdQhfbtQ+yRR6SgBN+CwDSo31dStCUvlrpjMdjnnYfUU0/BuuACOeo78wYq88gRaFu2wHrjDeiffCIzBBLs3Vlh0cHr0wd8aYAYOBC46CJg1CjYvXt33Su6Wl6HUg4Tt5eqryvICr8dL/yWYxUKnmOVKraCv0SjvBQiX9HVwBjvwAHoH34Io6pKXnGS15yYpPNAgTmXy1fp9O8vo2ybAunXD/FevUCwelM4DCsnaC/8RXnG/M1+8kFToFK8aiN6YcdUN4Wp9PJHbfkf65FaJpqTIWoAAAAASUVO
RK5CYII=", "small_icon": "iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAAAAXNSR0IArs4c6QAABiNJREFUWEetWFtIVF0U/s6Zm859JCV8SoKgjMS0C0SUFQVJYQSlFUlJ9eBDZBFB+Wag9FJSkRQGQuRb9hDdKI2E/JUopKwISoiIUObijHN1zv75tjq/l3Eu+u+nxTlrr7X2Wmt/a62tCCGEpmnw+XxwuVyIRqMYHx+XNJfH44HVaoXBYEjQWm8vTHv2yP8Lru/f4bHb5W/KCoVCCIfDkh4bG0M8Hpe0EAJerxdOpxMTExNQAoGAUBQFer0esVgMqqpCp9NJmouGkJEbSVOQ6O2FeffulPaEhoagW7lS8lAWZVI2aeqiTtLTuqUxigLF6/UKMprNZgQCAanUZDJJmove4eloCGmekputT59CV1OT1KjIq1cIl5VJfi7KokzKJk1d1EmaRlgsFhkVGq0sJmQUHOroQG5tbXKDnjyBae9eGeKsQ+Z2u6WHHA6HFGA0GqXFM4X5/X7pFcacp6KrbY8eQV9Xl9Sg8IsXCJWXz8rD3Nxc5OTkSLk2m02GjTQ9xPxhHslQRiIROkkq5AfSDA+9MDP+IhqF6fRpqF1dQCSSOqGn/xoM0KqqELl3j8kIXp65OTlXd9qQBd6+haW2FsrPn5kZsQCXKCzEeGcnrFu3pr5lC4ZsZASOykqoAwNLMmTuZm3dOoh//oFuKnzzQhaPx2XImCd2u13mB2+Vc/nyzEOTrcmqCt/fv7Dl5cmdxCXmFVNlPg719yN31y5A07JVkzV/uLsb2saNMncTODQzZL7hYTiKirIWvJQN/k+fYCsu/u+WJXDI7YaroAAQYinys9+rKPD++QNHQcGklxJIvW8fdG/eZCxQ27AB0Y4OhJ1OOM6cQVzTEGhvhzMYhHboENT+/sxllZYi0NMzidQej0cotHDNmowERI8fR/DGDQmkLCORSETSTEiCJmmelKXAfv481Pb2jOT6BgehKyqaLB1QlIw2BZubYWhoSFR+Ii8RmIjL0/GWkmaS8taQNj18CHN9fUbyY9EoFPfoqHAtW5Z2Q+TmTZjq6xOlY25LQa8QOvh9Ji2La08PrBUVaXWMu91Q4leuCLWpKSWztn49MDAgFdIj9ABp1j1WcdL0EKu4xLMtW6C9e4fAFHTQW1plJfTPnqXUE6urgxKrqBD67u6UjBPd3dBv3z6vWZsbMjYbKvPp8GHoOjtnFejYly8wpMnTeFkZFC0vTyhu98IG6XTwezzzqv3MkDmGhoDRUahVVdBqaqB1dCTCR8HMJVnt2UFONX7JFAqHA4owmwWCwYUNMpkQ+/oVhhUrknrIeOAAdM+fy/3RixcxfunSrKTmdxof/vEDOWvXAqHQwrpyczMwSFWhbduG8cePZT/DnpttBGkmb05hIcFMKmHTpq+ulv/JRx4uwoNlxw6o79+nBV5FOBwCPt/CVhuN8I+OLhiy6K9fsNy+DaWkBGNVVUlvWSJkTmfqgm02Q5nYvFno+vpSZ7/bDYPLlTapk+FQImQ+H3I4yaQoTdrq1VAiR48K44MHKQ2KnDqFYEvLrBY2GxySo1RjIwy3bqWGl8pKKL7Pn4W9uDgtaPm+fYNj1apFAaN3eBjODLqI8IcP2ZUO7+/fsOTnZ1U6ZMjy84F4PO2hJ0uH2y2M3d2wHDyYdgNrXuD1a8RKSuZNo8lKh21kBHqCYQbGjLe1QRw5MjWXxeNQOdSFw+mNAiBsNoz19UlsYrmInjsHPWW0tsrEN4yOwlJeDmVsLCN5MBohQiFMsIWdbtACg4Owl5ZmJmCaS1EgXC4oU2AnCGwcDrNs8gIvX8Kyc+dkgzazhY00NcHU2JidUUvkJrobW1qStLBTrx/ayZNQ799foprMtov9+4GurtmvH36/X75+cKIk3HOszmlogNrWlpnURXJp1dUI3r0rd7ONmX4JmRWymbP9WH8/7Js2LVJd6m3i40dZapLO9qleP1gkY5cvw9Tc/L8YFj17FoZr1+Cfet5J+mCVyetHgFf5wgWYOjqAiYmsjYucOAG0tsJktaZ//Zgeg/gEw/aTucS2gTQX289gMCinCtIcs03Xr0O9c4djLxT2UtNTLmHAYoGwWiGOHYNy9aqUQVlsdZkrpIldbHlJM3/5sMWJhd/+BXe3XAACKxnuAAAAAElFTkSuQmCC", "visible": true, "tab_version": "4.1.4", "tab_build_no": "0", "build_no": 18}, "data_input_builder": {"datainputs": [{"index": "default", "sourcetype": "sigsci-event", "interval": "300", "use_external_validation": true, "streaming_mode_xml": true, "name": "SigsciEvent", "title": "SigsciEvent", "description": "", "type": "customized", "parameters": [{"name": "site_api_name", "label": "Site API Name", "help_string": "This is the Site API Name. It should not be a URL.", "required": true, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": "jeremycx"}, {"name": "disable_catchup", "label": "Disable Catchup", "help_string": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", "required": false, "format_type": "checkbox", "default_value": true, "type": "checkbox", "value": false}, {"name": "twenty_hour_catchup", "label": "24 Hour Catchup", "help_string": "In the event the last time stored is >24Hours the TA will try and catch-up from exactly 24 hours ago, otherwise resets to now minus the delta. 'Disable Catchup' must be False in order to work. 
", "required": false, "format_type": "checkbox", "default_value": false, "type": "checkbox", "value": false}, {"name": "request_timeout", "label": "Request Timeout", "help_string": "Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required": true, "format_type": "text", "default_value": "60", "placeholder": "60", "type": "text", "value": "60"}, {"name": "read_timeout", "label": "read_timeout", "help_string": "Configured Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required": true, "format_type": "text", "default_value": "60", "placeholder": "60", "type": "text", "value": "60"}], "data_inputs_options": [{"type": "customized_var", "name": "site_api_name", "title": "Site API Name", "description": "This is the Site API Name. It should not be a URL.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "disable_catchup", "title": "Disable Catchup", "description": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", "required_on_edit": false, "required_on_create": false, "format_type": "checkbox", "default_value": true}, {"type": "customized_var", "name": "twenty_hour_catchup", "title": "24 Hour Catchup", "description": "In the event the last time stored is >24Hours the TA will try and catch-up from exactly 24 hours ago, otherwise resets to now minus the delta. 'Disable Catchup' must be False in order to work. ", "required_on_edit": false, "required_on_create": false, "format_type": "checkbox", "default_value": false}, {"type": "customized_var", "name": "request_timeout", "title": "Request Timeout", "description": "Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "60", "placeholder": "60"}, {"type": "customized_var", "name": "read_timeout", "title": "read_timeout", "description": "Configured Read Timeout for HTTP operations. 
Consider increasing if on a slow connection or pagination batches are large.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "60", "placeholder": "60"}], "code": "# encoding = utf-8\nfrom timeit import default_timer as timer\nimport requests\nimport json\nimport time\nfrom datetime import datetime\nfrom sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time, validate_timeouts\n\n\"\"\"\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n\"\"\"\n\n\n# def use_single_instance_mode():\n# return True\n\n\ndef validate_input(helper, definition):\n request_timeout = definition.parameters.get(\"request_timeout\", None)\n read_timeout = definition.parameters.get(\"read_timeout\", None)\n validate_timeouts(request_timeout, read_timeout)\n \n site_name = definition.parameters.get(\"site_api_name\", None)\n if site_name is None or site_name == \"\":\n msg = \"The site_name can not be empty\"\n raise ValueError(\"InvalidSiteName\", msg)\n elif \"http\" in site_name:\n msg = (\n \"The site name is not the full URL it should be the \",\n \"API Name of the site like 'my_example_site'\",\n )\n raise ValueError(\"InvalidSiteName\", msg)\n elif \" \" in site_name:\n msg = (\n \"The site name should be the API Name of the site like \",\n \"not the Display Name. Example would be 'my_site_name' instead of \",\n \"My Site Name\",\n )\n raise ValueError(\"InvalidSiteName\", msg)\n\n # Catchup Opts\n twenty_hour_catchup = definition.parameters.get('twenty_hour_catchup', None)\n disable_catchup = definition.parameters.get('disable_catchup', None)\n if twenty_hour_catchup and disable_catchup is True:\n raise ValueError(f\"Catch up values are mutually exclusive\")\n \n pass\n\n\ndef collect_events(helper, ew):\n start = timer()\n loglevel = helper.get_log_level()\n # Proxy setting configuration\n # proxy_settings = helper.get_proxy()\n global_email = helper.get_global_setting(\"email\")\n global_api_token = helper.get_global_setting(\"api_token\")\n global_corp_api_name = helper.get_global_setting(\"corp_api_name\")\n api_host = \"https://dashboard.signalsciences.net\"\n helper.log_info(\"email: %s\" % global_email)\n helper.log_info(\"corp: %s\" % global_corp_api_name)\n \n # Request / Read Timeouts\n request_timeout = float(helper.get_arg(\"request_timeout\"))\n read_timeout = float(helper.get_arg('read_timeout'))\n helper.log_info(f\"request configuration is: request:{request_timeout}, read: {read_timeout}\")\n \n # Config Declaration\n twenty_hour_catchup = helper.get_arg('twenty_hour_catchup')\n helper.log_info(f\"twenty four hour catchup is: {twenty_hour_catchup}\")\n \n disable_catchup = helper.get_arg('disable_catchup')\n helper.log_info(f\"disable catchup is: {disable_catchup}\")\n\n def pull_events(current_site, delta, key=None):\n site_name = current_site\n last_name = f\"events_last_until_time_{current_site}\"\n last_run_until = helper.get_check_point(last_name)\n helper.log_info(f\"last_run_until: {last_run_until}\")\n if last_run_until is None:\n (until_time, from_time) = get_from_and_until_times(\n helper, delta, five_min_offset=False\n )\n else:\n (until_time, from_time) = get_until_time(\n helper, last_run_until, delta, twenty_hour_catchup=twenty_hour_catchup, catchup_disabled=disable_catchup, five_min_offset=False\n )\n if from_time is None or from_time > until_time:\n 
helper.log_info(f\"{from_time} >= current now time, skipping run\")\n return\n if from_time >= until_time:\n helper.save_check_point(last_name, from_time)\n helper.log_info(\n f\"from_time {from_time} >= until_time {until_time}, skipping run\"\n )\n return\n helper.save_check_point(last_name, until_time)\n helper.log_info(\"SiteName: %s\" % site_name)\n\n helper.log_info(f\"Start Period: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(from_time))}\")\n helper.log_info(f\"End Period: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(until_time))}\")\n\n input_name = helper.get_input_stanza_names()\n single_name = \"\"\n\n if type(input_name) is dict and input_name > 1:\n helper.log_info(\"Multi instance mode\")\n for current_name in input_name:\n single_name = current_name\n else:\n helper.log_info(\"Single instance mode\")\n helper.log_info(\"Inputs: %s\" % input_name)\n helper.log_info(\"Inputs Num: %s\" % len(input_name))\n single_name = input_name\n helper.log_info(f\"single_name: {single_name}\")\n\n # Loop across all the data and output it in one big JSON object\n url = (\n f\"{api_host}/api/v0/corps/{global_corp_api_name}\"\n f\"/sites/{site_name}/activity?\"\n f\"from={from_time}&until={until_time}\"\n )\n helper.log_info(\"Pulling results from Events API\")\n config = Config(\n url=url,\n api_host=api_host,\n from_time=from_time,\n until_time=until_time,\n global_email=global_email,\n global_corp_api_name=global_corp_api_name,\n current_site=current_site,\n request_timeout=request_timeout,\n read_timeout=read_timeout,\n )\n config.headers = {\n \"Content-type\": \"application/json\",\n \"x-api-user\": global_email,\n \"x-api-token\": global_api_token,\n \"User-Agent\": config.user_agent_string,\n }\n all_events = get_results(\"Events\", helper, config)\n total_requests = len(all_events)\n helper.log_info(\"Total Events Pulled: %s\" % total_requests)\n write_start = timer()\n for current_event in all_events:\n helper.log_debug(current_event)\n if key is None:\n source_type = helper.get_sourcetype()\n helper.log_info(\"Concurrent Mode\")\n source_type_info = type(source_type)\n active_index = helper.get_output_index()\n index_info = type(active_index)\n single_name_info = type(single_name)\n current_event_info = type(current_event)\n helper.log_info(f\"source_type: {source_type}\")\n helper.log_info(f\"source_type_info: {source_type_info}\")\n helper.log_info(f\"index: {active_index}\")\n helper.log_info(f\"index_info: {index_info}\")\n helper.log_info(f\"single_name: {single_name}\")\n helper.log_info(f\"single_name_info: {single_name_info}\")\n helper.log_info(f\"current_event: {current_event}\")\n helper.log_info(f\"current_event_info: {current_event_info}\")\n event = helper.new_event(\n source=single_name,\n index=helper.get_output_index(),\n sourcetype=source_type,\n data=current_event,\n )\n else:\n indexes = helper.get_output_index()\n current_index = indexes[key]\n types = helper.get_sourcetype()\n source_type = types[key]\n single_name = single_name[0]\n helper.log_info(\"Sequential Mode\")\n helper.log_info(f\"source_type: {source_type}\")\n helper.log_info(f\"index: {current_index}\")\n helper.log_info(f\"single_name: {single_name}\")\n helper.log_info(f\"current_event: {current_event}\")\n event = helper.new_event(\n source=single_name,\n index=current_index,\n sourcetype=source_type,\n data=current_event,\n )\n\n try:\n ew.write_event(event)\n except Exception as e:\n raise e\n write_end = timer()\n write_time = write_end - write_start\n write_time_result = 
round(write_time, 2)\n helper.log_info(\"Total Event Output Time: %s seconds\" % write_time_result)\n\n # If multiple inputs configured it creates an array of values and the\n # script only gets called once per Input configuration\n\n all_sites = helper.get_arg(\"site_api_name\")\n time_deltas = helper.get_arg(\"interval\")\n helper.log_info(f\"interval: {time_deltas}\")\n if type(all_sites) is dict:\n helper.log_info(\"run_type: Sequential\")\n for active_input in all_sites:\n site = all_sites[active_input]\n current_delta = int(time_deltas[active_input])\n helper.log_info(\"site: %s\" % site)\n pull_events(key=active_input, current_site=site, delta=current_delta)\n helper.log_info(\"Finished Pulling Events for %s\" % site)\n else:\n helper.log_info(\"Run Type: Concurrent\")\n site = helper.get_arg(\"site_api_name\")\n helper.log_info(\"site: %s\" % site)\n pull_events(current_site=site, delta=int(time_deltas))\n helper.log_info(\"Finished Pulling Events for %s\" % site)\n end = timer()\n total_time = end - start\n time_result = round(total_time, 2)\n helper.log_info(\"Total Script Time: %s seconds\" % time_result)", "customized_options": [{"name": "site_api_name", "value": "jeremycx"}, {"name": "disable_catchup", "value": false}, {"name": "twenty_hour_catchup", "value": false}, {"name": "request_timeout", "value": "60"}, {"name": "read_timeout", "value": "60"}], "uuid": "294ad5bbdf92407b9a6785b46106152a", "sample_count": 0}, {"index": "default", "sourcetype": "sigsci-activity", "interval": "300", "use_external_validation": true, "streaming_mode_xml": true, "name": "SigsciActivity", "title": "SigsciActivity", "description": "", "type": "customized", "parameters": [{"name": "disable_catchup", "label": "Disable Catchup", "help_string": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", "required": false, "format_type": "checkbox", "default_value": true, "type": "checkbox", "value": true}, {"name": "twenty_hour_catchup", "label": "24 Hour Catchup", "help_string": "In the event the last time stored is >24Hours the TA will try and catch-up from exactly 24 hours ago, otherwise resets to now minus the delta. 'Disable Catchup' must be false in order to work. ", "required": false, "format_type": "checkbox", "default_value": false, "type": "checkbox", "value": false}, {"name": "request_timeout", "label": "Request Timeout", "help_string": "Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required": true, "format_type": "text", "default_value": "60", "placeholder": "60", "type": "text", "value": "60"}, {"name": "read_timeout", "label": "Read Timeout", "help_string": "Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required": true, "format_type": "text", "default_value": "60", "placeholder": "60", "type": "text", "value": "60"}], "data_inputs_options": [{"type": "customized_var", "name": "disable_catchup", "title": "Disable Catchup", "description": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. 
Default: True.", "required_on_edit": false, "required_on_create": false, "format_type": "checkbox", "default_value": true}, {"type": "customized_var", "name": "twenty_hour_catchup", "title": "24 Hour Catchup", "description": "In the event the last time stored is >24Hours the TA will try and catch-up from exactly 24 hours ago, otherwise resets to now minus the delta. 'Disable Catchup' must be false in order to work. ", "required_on_edit": false, "required_on_create": false, "format_type": "checkbox", "default_value": false}, {"type": "customized_var", "name": "request_timeout", "title": "Request Timeout", "description": "Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "60", "placeholder": "60"}, {"type": "customized_var", "name": "read_timeout", "title": "Read Timeout", "description": "Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "60", "placeholder": "60"}], "code": "# encoding = utf-8\nfrom timeit import default_timer as timer\nimport json\nimport time\nfrom datetime import datetime\nfrom sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time, validate_timeouts\n\n\"\"\"\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n\"\"\"\n\n\n# def use_single_instance_mode():\n# return True\n\n\ndef validate_input(helper, definition):\n request_timeout = definition.parameters.get(\"request_timeout\", None)\n read_timeout = definition.parameters.get(\"read_timeout\", None)\n validate_timeouts(request_timeout, read_timeout) \n \n # Catchup Opts\n twenty_hour_catchup = definition.parameters.get('twenty_hour_catchup', None)\n disable_catchup = definition.parameters.get('disable_catchup', None)\n if twenty_hour_catchup and disable_catchup is True:\n raise ValueError(f\"Catch up values are mutually exclusive\")\n pass\n\n\ndef collect_events(helper, ew):\n start = timer()\n loglevel = helper.get_log_level()\n helper.set_log_level(loglevel)\n # Proxy setting configuration\n # proxy_settings = helper.get_proxy()\n global_email = helper.get_global_setting(\"email\")\n global_api_token = helper.get_global_setting(\"api_token\")\n global_corp_api_name = helper.get_global_setting(\"corp_api_name\")\n api_host = \"https://dashboard.signalsciences.net\"\n helper.log_info(\"email: %s\" % global_email)\n helper.log_info(\"corp: %s\" % global_corp_api_name)\n \n # Request / Read Timeouts\n request_timeout = float(helper.get_arg(\"request_timeout\"))\n read_timeout = float(helper.get_arg('read_timeout'))\n helper.log_info(f\"request configuration is: request:{request_timeout}, read: {read_timeout}\")\n \n # CatchUp Config Declaration\n twenty_hour_catchup = helper.get_arg('twenty_hour_catchup')\n helper.log_info(f\"twenty four hour catchup is: {twenty_hour_catchup}\")\n \n disable_catchup = helper.get_arg('disable_catchup')\n helper.log_info(f\"disable catchup is: {disable_catchup}\")\n\n def pull_events(delta, key=None):\n last_run_until = helper.get_check_point(\"activity_last_until_time\")\n helper.log_info(f\"last_run_until: {last_run_until}\")\n if last_run_until is None:\n (until_time, from_time) = 
get_from_and_until_times(\n helper, delta, five_min_offset=False\n )\n else:\n (until_time, from_time) = get_until_time(\n helper, last_run_until, delta, twenty_hour_catchup=twenty_hour_catchup, catchup_disabled=disable_catchup, five_min_offset=False\n )\n if from_time is None:\n helper.log_info(f\"{last_run_until} >= current now time, skipping run\")\n return\n if from_time >= until_time:\n helper.save_check_point(\"activity_last_until_time\", from_time)\n helper.log_info(\n f\"from_time {from_time} >= until_time {until_time}, skipping run\"\n )\n return\n helper.save_check_point(\"activity_last_until_time\", until_time)\n\n helper.log_info(f\"Start Period: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(from_time))}\")\n helper.log_info(f\"End Period: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(until_time))}\")\n\n input_name = helper.get_input_stanza_names()\n single_name = \"\"\n\n if type(input_name) is dict and len(input_name) > 1:\n helper.log_info(\"Multi instance mode\")\n for current_name in input_name:\n single_name = current_name\n else:\n helper.log_info(\"Single instance mode\")\n helper.log_info(\"Inputs: %s\" % input_name)\n helper.log_info(\"Inputs Num: %s\" % len(input_name))\n single_name = input_name\n helper.log_info(f\"single_name: {single_name}\")\n\n # Loop across all the data and output it in one big JSON object\n url = (\n f\"{api_host}/api/v0/corps/{global_corp_api_name}\"\n f\"/activity?\"\n f\"from={from_time}&until={until_time}\"\n )\n config = Config(\n url=url,\n api_host=api_host,\n from_time=from_time,\n until_time=until_time,\n global_email=global_email,\n global_corp_api_name=global_corp_api_name,\n current_site=\"\",\n request_timeout=request_timeout,\n read_timeout=read_timeout,\n )\n config.headers = {\n \"Content-type\": \"application/json\",\n \"x-api-user\": global_email,\n \"x-api-token\": global_api_token,\n \"User-Agent\": config.user_agent_string,\n }\n helper.log_info(\"Pulling results from Corp Activity API\")\n all_events = get_results(\"Activity Events\", helper, config)\n total_requests = len(all_events)\n helper.log_info(\"Total Corp Activity Pulled: %s\" % total_requests)\n write_start = timer()\n for current_event in all_events:\n if key is None:\n source_type = helper.get_sourcetype()\n helper.log_info(\"Concurrent Mode\")\n source_type_info = type(source_type)\n active_index = helper.get_output_index()\n index_info = type(active_index)\n single_name_info = type(single_name)\n current_event_info = type(current_event)\n helper.log_info(f\"source_type: {source_type}\")\n helper.log_info(f\"source_type_info: {source_type_info}\")\n helper.log_info(f\"index: {active_index}\")\n helper.log_info(f\"index_info: {index_info}\")\n helper.log_info(f\"single_name: {single_name}\")\n helper.log_info(f\"single_name_info: {single_name_info}\")\n helper.log_info(f\"current_event: {current_event}\")\n helper.log_info(f\"current_event_info: {current_event_info}\")\n event = helper.new_event(\n source=single_name,\n index=helper.get_output_index(),\n sourcetype=source_type,\n data=current_event,\n )\n else:\n indexes = helper.get_output_index()\n current_index = indexes[key]\n types = helper.get_sourcetype()\n source_type = types[key]\n single_name = single_name[0]\n helper.log_info(\"Sequential Mode\")\n helper.log_info(f\"source_type: {source_type}\")\n helper.log_info(f\"index: {current_index}\")\n helper.log_info(f\"single_name: {single_name}\")\n helper.log_info(f\"current_event: {current_event}\")\n event = helper.new_event(\n 
source=single_name,\n index=current_index,\n sourcetype=source_type,\n data=current_event,\n )\n\n try:\n ew.write_event(event)\n except Exception as e:\n raise e\n write_end = timer()\n write_time = write_end - write_start\n write_time_result = round(write_time, 2)\n helper.log_info(f\"Total Corp Activity Output Time: {write_time_result} seconds\")\n\n # If multiple inputs configured it creates an array of values and the\n # script only gets called once per Input configuration\n time_deltas = helper.get_arg(\"interval\")\n helper.log_info(f\"interval: {time_deltas}\")\n if type(time_deltas) is dict:\n helper.log_info(\"run_type: Sequential\")\n for active_input in time_deltas:\n time_delta = time_deltas[active_input]\n time_delta = int(time_delta)\n helper.log_info(\"time_delta: %s\" % time_delta)\n pull_events(delta=time_delta, key=active_input)\n else:\n helper.log_info(\"Run Type: Concurrent\")\n helper.log_info(\"time_delta: %s\" % time_deltas)\n pull_events(delta=int(time_deltas))\n helper.log_info(\"Finished Pulling Corp Activity\")\n end = timer()\n total_time = end - start\n time_result = round(total_time, 2)\n helper.log_info(f\"Total Script Time: {time_result} seconds\")\n", "customized_options": [{"name": "disable_catchup", "value": true}, {"name": "twenty_hour_catchup", "value": false}, {"name": "request_timeout", "value": "60"}, {"name": "read_timeout", "value": "60"}], "uuid": "07b37d1943e942cf831c8ee85ffcb4a2", "sample_count": 0}, {"index": "default", "sourcetype": "sigsci-requests", "interval": "300", "use_external_validation": true, "streaming_mode_xml": true, "name": "SigsciRequests", "title": "SigsciRequests", "description": "", "type": "customized", "parameters": [{"name": "site_api_name", "label": "Site API Name", "help_string": "This is the API Name of the site to pull request data from. This should not be a URL.", "required": true, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": "jeremycx"}, {"name": "request_limit", "label": "Request Limit", "help_string": "The number of request objects returned in the array. Default: 1000. Max: 1000.", "required": true, "format_type": "text", "default_value": "1000", "placeholder": "", "type": "text", "value": "1000"}, {"name": "disable_catchup", "label": "Disable Catchup", "help_string": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", "required": false, "format_type": "checkbox", "default_value": true, "type": "checkbox", "value": true}, {"name": "twenty_hour_catchup", "label": "24 Hour Catchup", "help_string": "In the event the last time stored is >24hours the TA will try and catch-up from exactly 24 hours ago, otherwise resets to now minus the delta. 'Disable Catchup' must be False in order to work.", "required": false, "format_type": "checkbox", "default_value": false, "type": "checkbox", "value": false}, {"name": "attack_and_anomaly_signals_only", "label": "Attack & Anomaly Signals Only", "help_string": "Only retrieves requests that contain attack or anomaly signals. Please evaluate your signal configuration if there are overly inclusive signals creating excessive requests.", "required": false, "format_type": "checkbox", "default_value": false, "type": "checkbox", "value": false}, {"name": "request_timeout", "label": "Request Timeout", "help_string": "Configures Request Timeout for HTTP operations. 
Consider increasing if on a slow connection or pagination batches are large.", "required": true, "format_type": "text", "default_value": "60", "placeholder": "Request Timeout", "type": "text", "value": "60"}, {"name": "read_timeout", "label": "Read Timeout", "help_string": "Configures Read Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required": true, "format_type": "text", "default_value": "60", "placeholder": "", "type": "text", "value": "60"}], "data_inputs_options": [{"type": "customized_var", "name": "site_api_name", "title": "Site API Name", "description": "This is the API Name of the site to pull request data from. This should not be a URL.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "request_limit", "title": "Request Limit", "description": "The number of request objects returned in the array. Default: 1000. Max: 1000.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "1000", "placeholder": ""}, {"type": "customized_var", "name": "disable_catchup", "title": "Disable Catchup", "description": "Disables catch-up behavior. Events will always be ingested from now minus the delta (including an offset for the requests feed). Recommended to be left true. Default: True.", "required_on_edit": false, "required_on_create": false, "format_type": "checkbox", "default_value": true}, {"type": "customized_var", "name": "twenty_hour_catchup", "title": "24 Hour Catchup", "description": "In the event the last time stored is >24hours the TA will try and catch-up from exactly 24 hours ago, otherwise resets to now minus the delta. 'Disable Catchup' must be False in order to work.", "required_on_edit": false, "required_on_create": false, "format_type": "checkbox", "default_value": false}, {"type": "customized_var", "name": "attack_and_anomaly_signals_only", "title": "Attack & Anomaly Signals Only", "description": "Only retrieves requests that contain attack or anomaly signals. Please evaluate your signal configuration if there are overly inclusive signals creating excessive requests.", "required_on_edit": false, "required_on_create": false, "format_type": "checkbox", "default_value": false}, {"type": "customized_var", "name": "request_timeout", "title": "Request Timeout", "description": "Configures Request Timeout for HTTP operations. Consider increasing if on a slow connection or pagination batches are large.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "60", "placeholder": "Request Timeout"}, {"type": "customized_var", "name": "read_timeout", "title": "Read Timeout", "description": "Configures Read Timeout for HTTP operations. 
Consider increasing if on a slow connection or pagination batches are large.", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "60", "placeholder": ""}], "customized_options": [{"name": "site_api_name", "value": "jeremycx"}, {"name": "request_limit", "value": "1000"}, {"name": "disable_catchup", "value": true}, {"name": "twenty_hour_catchup", "value": false}, {"name": "attack_and_anomaly_signals_only", "value": false}, {"name": "request_timeout", "value": "60"}, {"name": "read_timeout", "value": "60"}], "code": "# encoding = utf-8\nfrom timeit import default_timer as timer\nimport time\nfrom datetime import datetime, timezone, timedelta\nfrom sigsci_helper import get_from_and_until_times, Config, get_results, get_until_time, validate_timeouts\n\n\"\"\"\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n\"\"\"\n\n# def use_single_instance_mode():\n# return True\n\ndef validate_input(helper, definition):\n # Validate presence before converting; int(None) would raise TypeError.\n request_limit = definition.parameters.get(\"request_limit\", None)\n if request_limit is None or request_limit == \"\":\n raise ValueError('The request limit cannot be blank')\n request_limit = int(request_limit)\n if request_limit <= 0:\n raise ValueError('The request limit must be greater than 0')\n if request_limit > 1000:\n raise ValueError('The request limit cannot be greater than 1000')\n\n # Read Timeout passed to send_http_request. Type: float.\n # https://docs.splunk.com/Documentation/AddonBuilder/4.1.4/UserGuide/PythonHelperFunctions\n # We do this per input module as splunk provides no way to validate global configuration arguments :')\n request_timeout = definition.parameters.get(\"request_timeout\", None)\n read_timeout = definition.parameters.get(\"read_timeout\", None)\n validate_timeouts(request_timeout, read_timeout)\n\n twenty_hour_catchup = definition.parameters.get('twenty_hour_catchup', None)\n disable_catchup = definition.parameters.get('disable_catchup', None)\n if twenty_hour_catchup and disable_catchup:\n raise ValueError(\"The '24 Hour Catchup' and 'Disable Catchup' options are mutually exclusive\")\n\n site_name = definition.parameters.get(\"site_api_name\", None)\n if site_name is None or site_name == \"\":\n msg = \"The site_name cannot be empty\"\n raise ValueError(\"InvalidSiteName\", msg)\n elif \"http\" in site_name:\n msg = (\n \"The site name is not the full URL; it should be the \"\n \"API Name of the site, like 'my_example_site'\"\n )\n raise ValueError(\"InvalidSiteName\", msg)\n elif \" \" in site_name:\n msg = (\n \"The site name should be the API Name of the site, \"\n \"not the Display Name. Example: 'my_site_name' instead of \"\n \"'My Site Name'\"\n )\n raise ValueError(\"InvalidSiteName\", msg)\n\n\ndef collect_events(helper, ew):\n start = timer()\n loglevel = helper.get_log_level()\n helper.set_log_level(loglevel)\n # Proxy setting configuration\n # proxy_settings = helper.get_proxy()\n api_host = \"https://dashboard.signalsciences.net\"\n global_email = helper.get_global_setting(\"email\")\n global_api_token = helper.get_global_setting(\"api_token\")\n global_corp_api_name = helper.get_global_setting(\"corp_api_name\")\n helper.log_info(\"email: %s\" % global_email)\n helper.log_info(\"corp: %s\" % global_corp_api_name)\n \n # Request / Read Timeouts\n request_timeout = float(helper.get_arg(\"request_timeout\"))\n read_timeout = float(helper.get_arg('read_timeout'))\n helper.log_info(f\"request configuration is: request:{request_timeout}, read: {read_timeout}\")\n\n # Config declaration.\n twenty_hour_catchup = helper.get_arg('twenty_hour_catchup')\n helper.log_info(f\"twenty four hour catchup is: {twenty_hour_catchup}\")\n\n disable_catchup = helper.get_arg('disable_catchup')\n helper.log_info(f\"disable catchup is: {disable_catchup}\")\n\n attack_and_anomaly_signals_only = helper.get_arg('attack_and_anomaly_signals_only')\n helper.log_info(f\"attack signals only is: {attack_and_anomaly_signals_only}\")\n\n def pull_requests(helper, current_site, delta, key=None):\n site_name = current_site\n last_name = f\"requests_last_until_time_{current_site}\"\n last_run_until = helper.get_check_point(last_name)\n request_limit = helper.get_arg('request_limit')\n helper.log_info(f\"request limit: {request_limit}\")\n\n if last_run_until is None:\n helper.log_info(\"no last_run_time found in checkpoint state\")\n helper.log_debug(\"get_from_until\")\n until_time, from_time = get_from_and_until_times(\n helper, delta, five_min_offset=True\n )\n else:\n helper.log_info(f\"last_run_until found in state: {last_run_until}\")\n helper.log_debug(\"get_until\")\n until_time, from_time = get_until_time(\n helper, last_run_until, delta, twenty_hour_catchup, disable_catchup, five_min_offset=True\n )\n\n if from_time is None:\n helper.log_info(f\"{last_run_until} >= current now time, skipping run\")\n return\n\n if from_time >= until_time:\n helper.save_check_point(last_name, from_time)\n helper.log_info(\n f\"from_time {from_time} >= until_time {until_time}, skipping run\"\n )\n return\n\n helper.log_info(\"SiteName: %s\" % site_name)\n helper.log_info(f\"Start Period: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(from_time))}\")\n helper.log_info(f\"End Period: {time.strftime('%Y-%m-%d %H:%M:%S UTC', time.gmtime(until_time))}\")\n\n input_name = helper.get_input_stanza_names()\n single_name = \"\"\n\n if type(input_name) is dict and len(input_name) > 1:\n helper.log_info(\"Multi instance mode\")\n for current_name in input_name:\n single_name = current_name\n else:\n helper.log_info(\"Single instance mode\")\n helper.log_info(\"Inputs: %s\" % input_name)\n helper.log_info(\"Inputs Num: %s\" % len(input_name))\n single_name = input_name\n helper.log_info(f\"single_name: {single_name}\")\n\n # Loop across all the data and output it in one big JSON object\n url = (\n f\"{api_host}/api/v0/corps/{global_corp_api_name}\"\n f\"/sites/{site_name}/feed/requests\"\n f\"?limit={request_limit}\"\n f\"&from={from_time}&until={until_time}\"\n )\n if attack_and_anomaly_signals_only:\n attack_signals = [\n \"USERAGENT\",\n \"AWS-SSRF\",\n \"BACKDOOR\",\n \"CMDEXE\",\n \"SQLI\",\n 
\"TRAVERSAL\",\n \"XSS\",\n \"XXE\"\n ]\n anomaly_signals = [\n \"2FA-DISABLED\", \"2FA-CHANGED\", \"ABNORMALPATH\", \"ADDRESS-CHANGED\", \"ALLOWED\",\n \"BHH\", \"BLOCKED\", \"BODY-PARSER-EVASION\", \"CODEINJECTION\", \"COMPRESSED\",\n \"CC-VAL-ATTEMPT\", \"CC-VAL-FAILURE\", \"CC-VAL-SUCCESS\", \"CVE-2017-5638\",\n \"CVE-2017-7269\", \"CVE-2017-9805\", \"CVE-2018-11776\", \"CVE-2018-15961\",\n \"CVE-2018-9206\", \"CVE-2019-0192\", \"CVE-2019-0193\", \"CVE-2019-0232\",\n \"CVE-2019-11580\", \"CVE-2019-14234\", \"CVE-2019-16759\", \"CVE-2019-2725\",\n \"CVE-2019-3396\", \"CVE-2019-3398\", \"CVE-2019-5418\", \"CVE-2019-6340\",\n \"CVE-2019-8394\", \"CVE-2019-8451\", \"CVE-2021-26084\", \"CVE-2021-26855\",\n \"CVE-2021-40438\", \"CVE-2021-44228\", \"CVE-2021-44228-STRICT\",\n \"CVE-2022-22963\", \"CVE-2022-22965\", \"CVE-2022-26134\", \"CVE-2022-42889\",\n \"CVE-2023-34362\", \"CVE-2023-38218\", \"DATACENTER\", \"DOUBLEENCODING\",\n \"EMAIL-CHANGED\", \"EMAIL-VALIDATION\", \"FORCEFULBROWSING\", \"GC-VAL-ATTEMPT\",\n \"GC-VAL-FAILURE\", \"GC-VAL-SUCCESS\", \"GRAPHQL-API\", \"GRAPHQL-DUPLICATE-VARIABLES\",\n \"GRAPHQL-IDE\", \"GRAPHQL-INTROSPECTION\", \"GRAPHQL-DEPTH\",\n \"GRAPHQL-MISSING-REQUIRED-OPERATION-NAME\",\n \"GRAPHQL-UNDEFINED-VARIABLES\", \"HTTP403\", \"HTTP404\", \"HTTP429\",\n \"HTTP4XX\", \"HTTP500\", \"HTTP503\", \"HTTP5XX\", \"IMPOSTOR\", \"INFO-VIEWED\",\n \"INSECURE-AUTH\", \"NOTUTF8\", \"INVITE-FAILURE\", \"INVITE-ATTEMPT\",\n \"INVITE-SUCCESS\", \"JSON-ERROR\", \"KBA-CHANGED\", \"LOGINATTEMPT\",\n \"LOGINDISCOVERY\", \"LOGINFAILURE\", \"LOGINSUCCESS\", \"MALFORMED-DATA\",\n \"SANS\", \"MESSAGE-SENT\", \"NO-CONTENT-TYPE\", \"NOUA\", \"NULLBYTE\",\n \"OOB-DOMAIN\", \"PW-CHANGED\", \"PW-RESET-ATTEMPT\", \"PW-RESET-FAILURE\",\n \"PW-RESET-SUCCESS\", \"PRIVATEFILE\", \"rate-limit\", \"REGATTEMPT\", \"REGFAILURE\",\n \"REGSUCCESS\", \"RSRC-ID-ENUM-ATTEMPT\", \"RSRC-ID-ENUM-FAILURE\",\n \"RSRC-ID-ENUM-SUCCESS\", \"RESPONSESPLIT\", \"SCANNER\", \"SIGSCI-IP\",\n \"TORNODE\", \"WRONG-API-CLIENT\", \"USER-ID-ENUM-ATTEMPT\",\n \"USER-ID-ENUM-FAILURE\", \"USER-ID-ENUM-SUCCESS\", \"WEAKTLS\", \"XML-ERROR\"\n ]\n attack_tags = \",\".join(attack_signals)\n anomaly_tags = \",\".join(anomaly_signals)\n url = f\"{url}&tags={attack_tags},{anomaly_tags}\"\n config = Config(\n url=url,\n api_host=api_host,\n from_time=from_time,\n until_time=until_time,\n global_email=global_email,\n global_corp_api_name=global_corp_api_name,\n current_site=current_site,\n request_timeout=request_timeout,\n read_timeout=read_timeout,\n )\n config.headers = {\n \"Content-type\": \"application/json\",\n \"x-api-user\": global_email,\n \"x-api-token\": global_api_token,\n \"User-Agent\": config.user_agent_string,\n }\n\n all_requests = get_results(\"Requests\", helper, config)\n\n total_requests = len(all_requests)\n helper.log_info(\"Total Requests Pulled: %s\" % total_requests)\n if total_requests == 0:\n helper.save_check_point(last_name, until_time)\n helper.log_info(\n f\"No events to write, saving checkpoint to value:{until_time}\"\n )\n write_start = timer()\n event_count = 0\n for current_event in all_requests:\n if key is None:\n source_type = helper.get_sourcetype()\n event = helper.new_event(\n source=single_name,\n index=helper.get_output_index(),\n sourcetype=source_type,\n data=current_event,\n )\n else:\n indexes = helper.get_output_index()\n current_index = indexes[key]\n types = helper.get_sourcetype()\n source_type = types[key]\n single_name = single_name[0]\n event = helper.new_event(\n 
source=single_name,\n index=current_index,\n sourcetype=source_type,\n data=current_event,\n )\n\n try:\n ew.write_event(event)\n event_count += 1 # increment the count for successful events to not spam debug.\n except Exception as e:\n helper.log_error(f\"error writing event: {e}\")\n helper.log_error(event)\n raise e\n if event_count != 0: # We save the checkpoint earlier on 0 events.\n helper.log_info(f\"{event_count} events written, saving checkpoint: {until_time}\")\n helper.save_check_point(last_name, until_time)\n write_end = timer()\n write_time = write_end - write_start\n write_time_result = round(write_time, 2)\n helper.log_info(\"Total Event Output Time: %s seconds\" % write_time_result)\n\n # If multiple inputs configured it creates an array of values and the\n # script only gets called once per Input configuration\n all_sites = helper.get_arg(\"site_api_name\")\n time_deltas = helper.get_arg(\"interval\")\n helper.log_info(f\"interval: {time_deltas}\")\n\n if type(all_sites) is dict:\n helper.log_info(\"run_type: Sequential\")\n for active_input, site in all_sites.items():\n time_delta = int(time_deltas[active_input])\n helper.log_info(\"site: %s\" % site)\n pull_requests(helper, key=active_input, current_site=site, delta=time_delta)\n helper.log_info(\"Finished Pulling Requests for %s\" % site)\n else:\n helper.log_info(\"Run Type: Concurrent\")\n site = helper.get_arg(\"site_api_name\")\n helper.log_info(\"site: %s\" % site)\n pull_requests(helper, current_site=site, delta=int(time_deltas))\n helper.log_info(\"Finished Pulling Requests for %s\" % site)\n end = timer()\n total_time = end - start\n time_result = round(total_time, 2)\n helper.log_info(\"Total Script Time: %s seconds\" % time_result)", "uuid": "aaaeb391da9043e1819408033d9db708", "sample_count": "18344"}]}, "field_extraction_builder": {"sigsci-event": {"is_parsed": true, "data_format": "json"}, "sigsci-activity": {"is_parsed": true, "data_format": "json"}, "sigsci-requests": {"is_parsed": true, "data_format": "json"}}, "global_settings_builder": {"global_settings": {"proxy_settings": {"proxy_type": "http"}, "log_settings": {"log_level": "DEBUG"}, "customized_settings": [{"required": true, "name": "email", "label": "E-mail", "default_value": "", "placeholder": "example@example.com", "help_string": "This is the e-mail username of the user who has the correct permissions for the app to pull the data.", "type": "text", "format_type": "text", "value": "jcocks+sigsci@fastly.com"}, {"required": true, "name": "corp_api_name", "label": "Corp", "default_value": "", "placeholder": "", "help_string": "This is the API name of your corp.", "type": "text", "format_type": "text", "value": "jcocks"}, {"required": true, "name": "api_token", "label": "API Token", "placeholder": "", "default_value": "", "help_string": "This is the API Token of the user who has the correct permissions. 
The API Token is tied to the username.", "type": "password", "format_type": "password", "value": ""}]}}, "sourcetype_builder": {"sigsci-requests": {"metadata": {"event_count": 0, "data_input_name": "SigsciRequests", "extractions_count": 0, "cims_count": 0}}, "sigsci-event": {"metadata": {"event_count": 0, "data_input_name": "SigsciEvent", "extractions_count": 0, "cims_count": 0}}, "sigsci-activity": {"metadata": {"event_count": 0, "data_input_name": "SigsciActivity", "extractions_count": 0, "cims_count": 0}}}, "validation": {"validators": ["best_practice_validation", "data_model_mapping_validation", "field_extract_validation", "app_cert_validation"], "status": "job_finished", "validation_id": "v_1710347946_44", "progress": 1.0}} \ No newline at end of file diff --git a/splunkbase_prepped_file/sigsci_TA_for_splunk-1.0.38.spl b/splunkbase_prepped_file/sigsci_TA_for_splunk-1.0.38.spl new file mode 100644 index 0000000..5190f29 Binary files /dev/null and b/splunkbase_prepped_file/sigsci_TA_for_splunk-1.0.38.spl differ
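
The modular inputs embedded in the export above (SigsciEvent, SigsciActivity, SigsciRequests) all end with the same dispatch pattern: Add-on Builder hands each argument to the script as a plain value when a single input stanza runs in the instance, and as a dict keyed by stanza name when several stanzas share one instance. A minimal sketch of that pattern follows; the names dispatch and pull are illustrative, not part of the TA:

    def dispatch(helper, pull):
        # helper.get_arg() returns a plain string for a single input stanza,
        # and a dict keyed by stanza name when several run in one instance.
        sites = helper.get_arg("site_api_name")
        intervals = helper.get_arg("interval")
        if isinstance(sites, dict):
            # Sequential mode: one pull per configured stanza.
            for stanza, site in sites.items():
                pull(key=stanza, current_site=site, delta=int(intervals[stanza]))
        else:
            # Concurrent mode: a single stanza per script instance.
            pull(key=None, current_site=sites, delta=int(intervals))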
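
Each feed persists the end of the window it last collected with helper.save_check_point and derives the next [from, until) window from that checkpoint, skipping the run once the checkpoint has caught up to "now". A condensed sketch of that contract, assuming epoch-second timestamps and ignoring the five-minute requests-feed offset and the catch-up options handled inside sigsci_helper (which is not part of this diff):

    import time

    def next_window(last_until, delta, now=None):
        """Return (from_time, until_time) for the next poll, or None to skip.

        last_until: checkpointed end of the previous window, or None.
        delta:      polling interval in seconds.
        """
        now = int(now if now is not None else time.time())
        if last_until is None:
            return now - delta, now      # first run: look back one interval
        if int(last_until) >= now:
            return None                  # already caught up: skip this run
        return int(last_until), now      # resume where the last run ended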
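
validate_timeouts is imported from sigsci_helper, which this diff does not include. Judging only from how the modules call it (raw string parameters in validate_input, float() conversion in collect_events), a plausible shape might be the following; this is a hypothetical sketch, not the shipped helper:

    def validate_timeouts(request_timeout, read_timeout):
        # Hypothetical reconstruction -- the real implementation lives in
        # sigsci_helper and may differ.
        for name, value in (("request_timeout", request_timeout),
                            ("read_timeout", read_timeout)):
            try:
                parsed = float(value)
            except (TypeError, ValueError):
                raise ValueError("%s must be a number, got: %r" % (name, value))
            if parsed <= 0:
                raise ValueError("%s must be greater than 0" % name)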
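
Both collectors authenticate with the x-api-user / x-api-token headers shown in their Config blocks and let get_results page through the API. Stripped of the helper layer, a single Corp Activity fetch reduces to roughly the following; this is illustrative only, assumes the usual {"data": [...]} response envelope, and omits the pagination get_results performs:

    import requests

    API_HOST = "https://dashboard.signalsciences.net"

    def fetch_corp_activity(corp, email, token, from_time, until_time,
                            request_timeout=60.0, read_timeout=60.0):
        # Same URL and auth headers the SigsciActivity module builds.
        url = (f"{API_HOST}/api/v0/corps/{corp}/activity"
               f"?from={from_time}&until={until_time}")
        headers = {
            "Content-type": "application/json",
            "x-api-user": email,
            "x-api-token": token,
        }
        # The (connect, read) timeout tuple mirrors the TA's two settings.
        resp = requests.get(url, headers=headers,
                            timeout=(request_timeout, read_timeout))
        resp.raise_for_status()
        return resp.json().get("data", [])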
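
The SigsciRequests module narrows the feed with the request_limit parameter and, when "Attack & Anomaly Signals Only" is enabled, joins its attack and anomaly signal lists into a single comma-separated tags filter. The URL assembly reduces to this sketch (function name illustrative):

    def build_requests_feed_url(api_host, corp, site, from_time, until_time,
                                limit=1000, tags=None):
        # Mirrors the f-string in the SigsciRequests module; tags is an
        # optional list of signal names collapsed into one comma-separated
        # filter, as done for attack_and_anomaly_signals_only.
        url = (f"{api_host}/api/v0/corps/{corp}/sites/{site}/feed/requests"
               f"?limit={limit}&from={from_time}&until={until_time}")
        if tags:
            url += "&tags=" + ",".join(tags)
        return url

For example, build_requests_feed_url(api_host, corp, "my_site", 1700000000, 1700000300, tags=["SQLI", "XSS"]) appends &tags=SQLI,XSS, matching the comma-joined form the module produces.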
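
Finally, the write loops resolve index and sourcetype per stanza: in sequential mode helper.get_output_index() and helper.get_sourcetype() return dicts keyed by stanza name, while in concurrent mode they return plain values. A condensed sketch of that loop, under the same dict-or-scalar assumption (write_events is an illustrative name):

    def write_events(helper, ew, events, stanza=None):
        # Resolve per-stanza routing when running in sequential mode.
        index = helper.get_output_index()
        sourcetype = helper.get_sourcetype()
        if stanza is not None:
            index = index[stanza]
            sourcetype = sourcetype[stanza]
        written = 0
        for data in events:
            event = helper.new_event(data=data, index=index,
                                     sourcetype=sourcetype)
            ew.write_event(event)
            written += 1  # count successes so the checkpoint is only
        return written    # advanced after events actually landed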