IBM QRadar SOAR

Unable to fetch all incidents from Resilient through the API for a longer retention period

Chandrabose Thirumalai Manikandan posted Mon April 07, 2025 06:05 AM

Hi Team,

We are trying to fetch all incidents from Resilient, from the 1st up to roughly the 400,000th (4 lakh), through the API, to cover a longer retention period.


Receiving an error message: "Unable to process the supplied JSON. The field 'filters' is not recognized"

Kindly help us with this, please.

The code is attached below:

import requests
import json
import time
from base64 import b64encode
 
# IBM Resilient API credentials
Resilient_URL = "https://{server_name}/rest/orgs/{org_id}/incidents"
 
API_Key = "<api_key_id>"
API_Secret = "<api_key_secret>"
 
# Encode API credentials
auth_header = b64encode(f"{API_Key}:{API_Secret}".encode()).decode()
 
headers = {"Authorization": f"Basic {auth_header}", "Content-Type": "application/json"}
 
date_time1 = "01.01.2017 00:00:00"
date_time2 = "31.12.2017 00:00:00"
pattern = "%d.%m.%Y %H:%M:%S"
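# Convert the retention window boundaries to epoch milliseconds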
start_time = int((time.mktime(time.strptime(date_time1,pattern)))*1000)
end_time   = int((time.mktime(time.strptime(date_time2,pattern)))*1000)
print(f"Start time : {start_time}, End time : {end_time}")
 
# Fetch the list of incidents
def fetch_incidents():
    batch_size = 1
    offset = 0
    incident_url = f"{Resilient_URL}/query_paged?return_level=full" 
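    # Paged query body: incidents whose create_date falls between start_time and end_time, newest first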
    query = {
            "filters":[
                {
                    "conditions":[
                        {
                            "field_name":"create_date",
                            "method":"gte",
                            "value":start_time
                        },
                        {
                            "field_name":"create_date",
                            "method":"lte",
                            "value":end_time
                        }
                    ]
                }
            ],
            "sorts":[
                {
                    "field_name":"create_date",
                    "type":"desc"
                }
            ]
        }
    incidents = requests.post(Resilient_URL, headers=headers, data=json.dumps(query))
    if incidents.status_code == 200:
        incidents_data = incidents.json()
        filename = f"/home/test/incidents_data_{offset}.json"
        with open(filename, "w") as file:
            json.dump(incidents_data, file, indent=4)
        print(f"Incidents data saved to {filename}")
    else:
        print(f"Error in fetching incidents comments : {incidents.status_code} or {incidents.text}")
def main():
    fetch_incidents()
    print("All  incidents and comments are fetched")
 
if __name__ == "__main__":
    main()
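
One thing visible in the snippet above: incident_url (the .../incidents/query_paged endpoint) is built inside fetch_incidents() but never used; the POST is sent to Resilient_URL, the plain /incidents endpoint, which does not accept a query body with 'filters'. A rough sketch of pointing the request at the paged endpoint instead, assuming the PagedQueryDTO's start/length paging fields (please verify the field names and the response shape against the REST API reference for your SOAR version):

# Sketch only: inside fetch_incidents(), post the query to incident_url
# and page through results so all 4 lakh incidents can be pulled in batches.
# "start"/"length" are assumed PagedQueryDTO fields -- verify for your version.
query["start"] = offset          # first record of this page
query["length"] = batch_size     # records per page, e.g. 1000
incidents = requests.post(incident_url, headers=headers, data=json.dumps(query))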

Thanks,

Chandrabose