Installation
- Gradle
- Maven
dependencies {
implementation 'com.newscatcherapi:newscatcher-catchall-sdk:0.3.1'
}
<dependency>
<groupId>com.newscatcherapi</groupId>
<artifactId>newscatcher-catchall-sdk</artifactId>
<version>0.3.1</version>
</dependency>
Quickstart
Get started with CatchAll in three steps:
1. Initialize the client
import com.newscatcher.catchall.CatchAllApi;
CatchAllApi client = CatchAllApi.builder()
.apiKey("YOUR_API_KEY")
.build();
2. Create a job
import com.newscatcher.catchall.resources.jobs.requests.SubmitRequestDto;
var job = client.jobs().createJob(
SubmitRequestDto.builder()
.query("AI company acquisitions")
.limit(10)
.build()
);
String jobId = job.getJobId();
3. Wait and retrieve results
import com.newscatcher.catchall.types.StatusResponseDto;
import com.newscatcher.catchall.types.JobStatus;
final int POLL_INTERVAL_MS = 60000;
// Poll for completion
while (true) {
StatusResponseDto status = client.jobs().getJobStatus(jobId);
if (JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
break;
}
Thread.sleep(POLL_INTERVAL_MS);
}
// Get results
var results = client.jobs().getJobResults(jobId);
System.out.println("Found " + results.getValidRecords().orElse(0) + " valid records");
Jobs process asynchronously and typically complete in 10-15 minutes. See the
Quickstart for a complete walkthrough.
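Because completion can take a while, it often helps to bound polling with an overall time budget instead of looping forever. A minimal sketch, reusing the same getJobStatus call as above; the 30-minute budget is an arbitrary example value, not an API limit:
final int POLL_INTERVAL_MS = 60_000;
final long MAX_WAIT_MS = 30L * 60_000; // example budget, not an API limit
long deadline = System.currentTimeMillis() + MAX_WAIT_MS;
while (System.currentTimeMillis() < deadline) {
    StatusResponseDto status = client.jobs().getJobStatus(jobId);
    if (JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
        break;
    }
    Thread.sleep(POLL_INTERVAL_MS);
}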
Working with jobs
- Get suggestions
- Create and track
- Continue jobs
- Early results
- List jobs
Preview suggested validators, enrichments, and date ranges before creating a job:
import com.newscatcher.catchall.CatchAllApiClient;
import com.newscatcher.catchall.resources.jobs.requests.InitializeRequestDto;
import com.newscatcher.catchall.types.InitializeResponseDto;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
CatchAllApiClient client = CatchAllApiClient.builder()
.apiKey("YOUR_API_KEY")
.build();
InitializeResponseDto suggestions = client.jobs().initialize(
InitializeRequestDto.builder()
.query("AI company acquisitions")
.context("Focus on deal size and acquiring company details")
.build()
);
ObjectMapper mapper = new ObjectMapper();
mapper.enable(SerializationFeature.INDENT_OUTPUT);
System.out.println(mapper.writeValueAsString(suggestions));
Show suggestions response
{
"validators": [
{
"name": "is_acquisition_event",
"description": "true if article describes a completed or announced acquisition",
"type": "boolean"
},
{
"name": "involves_ai_company",
"description": "true if acquiring or acquired company is in AI sector",
"type": "boolean"
}
],
"enrichments": [
{
"name": "acquirer_company",
"description": "Extract the acquiring company name",
"type": "company"
},
{
"name": "acquired_company",
"description": "Extract the acquired company name",
"type": "company"
},
{
"name": "deal_value",
"description": "Extract acquisition price if mentioned",
"type": "number"
},
{
"name": "announcement_date",
"description": "Extract date of announcement",
"type": "date"
},
{
"name": "acquirer_details",
"description": "Extract details about the acquiring company",
"type": "text"
}
],
"start_date": "2026-02-01T14:12:57.292205+00:00",
"end_date": "2026-02-06T14:12:57.292205+00:00",
"date_modification_message": [
"No dates were provided; using a default window of 5 days (2026-02-01 to 2026-02-06)."
]
}
To learn more, see the Initialize endpoint.
Submit a query and track its progress:
import com.newscatcher.catchall.resources.jobs.requests.SubmitRequestDto;
import com.newscatcher.catchall.types.StatusResponseDto;
import com.newscatcher.catchall.types.JobStatus;
// Package paths for the schema types below are assumed; adjust to your SDK version.
import com.newscatcher.catchall.types.ValidatorSchema;
import com.newscatcher.catchall.types.EnrichmentSchema;
import com.newscatcher.catchall.types.EnrichmentType;
final int POLL_INTERVAL_MS = 60000;
// Create job with custom validators and enrichments
var job = client.jobs().createJob(
SubmitRequestDto.builder()
.query("AI company acquisitions")
.context("Focus on deal size and acquiring company details")
.limit(10)
.validators(
ValidatorSchema.builder()
.name("is_acquisition_event")
.description("true if article describes a completed or announced acquisition")
.type("boolean")
.build()
)
.enrichments(
EnrichmentSchema.builder()
.name("acquirer_company")
.description("Extract the acquiring company name")
.type(EnrichmentType.COMPANY)
.build(),
EnrichmentSchema.builder()
.name("acquired_company")
.description("Extract the acquired company name")
.type(EnrichmentType.COMPANY)
.build(),
EnrichmentSchema.builder()
.name("deal_value")
.description("Extract acquisition price if mentioned")
.type(EnrichmentType.NUMBER)
.build()
)
.build()
);
System.out.println("Job created: " + job.getJobId());
// Monitor progress
String jobId = job.getJobId();
while (true) {
StatusResponseDto status = client.jobs().getJobStatus(jobId);
if (JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
break;
}
status.getSteps().stream()
.filter(step -> !step.getCompleted().orElse(false))
.findFirst()
.ifPresent(step -> System.out.println(String.format(
"Step %d/7: %s",
step.getOrder(),
step.getStatus()
)));
Thread.sleep(POLL_INTERVAL_MS);
}
// Retrieve results
var results = client.jobs().getJobResults(jobId);
System.out.println("\nFound " + results.getValidRecords().orElse(0) + " valid records");
results.getAllRecords().ifPresent(records ->
records.forEach(record -> System.out.println(" " + record.getRecordTitle()))
);
Validators and enrichments are optional. If not provided, the system
generates them automatically based on your query.
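For example, a request that omits both fields lets CatchAll derive validators and enrichments from the query on its own. A minimal sketch reusing the createJob call shown above:
// No validators or enrichments: the system generates them from the query.
var autoJob = client.jobs().createJob(
    SubmitRequestDto.builder()
        .query("AI company acquisitions")
        .limit(10)
        .build()
);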
Extend processing limits for completed jobs:
import com.newscatcher.catchall.resources.jobs.requests.ContinueRequestDto;
import com.newscatcher.catchall.types.ContinueResponseDto;
import com.newscatcher.catchall.types.StatusResponseDto;
import com.newscatcher.catchall.types.JobStatus;
final int POLL_INTERVAL_MS = 60000;
// Continue job to process more records
ContinueResponseDto continued = client.jobs().continueJob(
ContinueRequestDto.builder()
.jobId(jobId)
.newLimit(50)
.build()
);
System.out.println(String.format(
"Continued: %s -> %s records",
continued.getPreviousLimit(),
continued.getNewLimit()
));
// Wait for completion
while (true) {
StatusResponseDto status = client.jobs().getJobStatus(jobId);
if (JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
break;
}
Thread.sleep(POLL_INTERVAL_MS);
}
// Get final results
var finalResults = client.jobs().getJobResults(jobId);
System.out.println("Total: " + finalResults.getValidRecords().orElse(0) + " valid records");
Use the limit parameter when creating jobs to start with fewer records for quick testing. Continue the job if you need more records after reviewing initial results.
Retrieve partial results during the enriching stage:
import com.newscatcher.catchall.types.PullJobResponseDto;
import com.newscatcher.catchall.types.StatusResponseDto;
import com.newscatcher.catchall.types.JobStatus;
final int POLL_INTERVAL_MS = 60000;
while (true) {
StatusResponseDto status = client.jobs().getJobStatus(jobId);
if (JobStatus.ENRICHING.equals(status.getStatus().orElse(null)) ||
JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
PullJobResponseDto results = client.jobs().getJobResults(jobId);
if (results.getValidRecords().orElse(0) > 0) {
System.out.println(String.format(
"Progress: %d/%d validated, %d valid",
results.getProgressValidated().orElse(0),
results.getCandidateRecords().orElse(0),
results.getValidRecords().orElse(0)
));
}
if (JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
break;
}
}
Thread.sleep(POLL_INTERVAL_MS);
}
Retrieve all jobs created by your account:
import com.newscatcher.catchall.types.ListUserJobsResponseDto;
import java.util.List;
List<ListUserJobsResponseDto> jobs = client.jobs().getUserJobs();
jobs.forEach(job -> {
System.out.println(String.format(
"Job %s: %s (%s)",
job.getJobId(),
job.getQuery(),
job.getStatus()
));
});
Complete example with all features
import com.newscatcher.catchall.CatchAllApi;
import com.newscatcher.catchall.resources.jobs.requests.SubmitRequestDto;
import com.newscatcher.catchall.resources.jobs.requests.ContinueRequestDto;
import com.newscatcher.catchall.types.StatusResponseDto;
import com.newscatcher.catchall.types.PullJobResponseDto;
import com.newscatcher.catchall.types.JobStatus;
// Package paths for the schema types below are assumed; adjust to your SDK version.
import com.newscatcher.catchall.types.EnrichmentSchema;
import com.newscatcher.catchall.types.EnrichmentType;
import com.newscatcher.catchall.core.NewscatcherApiApiException;
public class CompleteExample {
public static void main(String[] args) {
final int POLL_INTERVAL_MS = 60000;
CatchAllApi client = CatchAllApi.builder()
.apiKey("YOUR_API_KEY")
.timeout(30)
.maxRetries(3)
.build();
try {
// Create job with custom enrichments
var job = client.jobs().createJob(
SubmitRequestDto.builder()
.query("AI company acquisitions")
.context("Focus on deal size and acquiring company details")
.limit(10)
.enrichments(
EnrichmentSchema.builder()
.name("acquirer_company")
.description("Extract the acquiring company name")
.type(EnrichmentType.COMPANY)
.build(),
EnrichmentSchema.builder()
.name("deal_value")
.description("Extract acquisition price if mentioned")
.type(EnrichmentType.NUMBER)
.build()
)
.build()
);
String jobId = job.getJobId();
System.out.println("Job created: " + jobId);
// Poll with early results access
while (true) {
StatusResponseDto status = client.jobs().getJobStatus(jobId);
if (JobStatus.ENRICHING.equals(status.getStatus().orElse(null)) ||
JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
PullJobResponseDto results = client.jobs().getJobResults(jobId);
if (results.getValidRecords().orElse(0) > 0) {
System.out.println(String.format(
"Progress: %d valid records",
results.getValidRecords().orElse(0)
));
}
if (JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
break;
}
}
Thread.sleep(POLL_INTERVAL_MS);
}
// Continue if needed
PullJobResponseDto results = client.jobs().getJobResults(jobId);
if (results.getValidRecords().orElse(0) >= 10) {
client.jobs().continueJob(
ContinueRequestDto.builder()
.jobId(jobId)
.newLimit(50)
.build()
);
while (true) {
StatusResponseDto status = client.jobs().getJobStatus(jobId);
if (JobStatus.COMPLETED.equals(status.getStatus().orElse(null))) {
break;
}
Thread.sleep(POLL_INTERVAL_MS);
}
results = client.jobs().getJobResults(jobId);
}
// Display results
System.out.println("\nFinal: " + results.getValidRecords().orElse(0) + " valid records");
results.getAllRecords().ifPresent(records ->
records.forEach(record -> System.out.println(" " + record.getRecordTitle()))
);
} catch (NewscatcherApiApiException e) {
System.err.println("Status: " + e.statusCode());
System.err.println("Error: " + e.body());
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
System.err.println("Operation interrupted");
}
}
}
Working with monitors
Automate recurring queries with scheduled execution.
- Create monitor
- Update monitor
- Pause/Resume
- List monitors
- Retrieve results
Create a monitor from a completed job:
import com.newscatcher.catchall.resources.monitors.requests.CreateMonitorRequestDto;
import com.newscatcher.catchall.types.CreateMonitorResponseDto;
import com.newscatcher.catchall.types.WebhookDto;
import com.newscatcher.catchall.types.WebhookDtoMethod;
import java.util.Map;
CreateMonitorResponseDto monitor = client.monitors().createMonitor(
CreateMonitorRequestDto.builder()
.referenceJobId(jobId)
.schedule("every day at 12 PM UTC")
.webhook(WebhookDto.builder()
.url("https://your-endpoint.com/webhook")
.method(WebhookDtoMethod.POST)
.headers(Map.of("Authorization", "Bearer YOUR_TOKEN"))
.build())
.build()
);
System.out.println("Monitor created: " + monitor.getMonitorId().orElse("N/A"));
Monitors require a minimum 24-hour interval between executions. Learn more in the
Monitors documentation.
Update webhook configuration for an existing monitor:
import com.newscatcher.catchall.resources.monitors.requests.UpdateMonitorRequestDto;
import com.newscatcher.catchall.types.UpdateMonitorResponseDto;
import com.newscatcher.catchall.types.WebhookDto;
import com.newscatcher.catchall.types.WebhookDtoMethod;
import java.util.Map;
String monitorId = monitor.getMonitorId().orElseThrow();
UpdateMonitorResponseDto updated = client.monitors().updateMonitor(
monitorId,
UpdateMonitorRequestDto.builder()
.webhook(WebhookDto.builder()
.url("https://new-endpoint.com/webhook")
.method(WebhookDtoMethod.POST)
.headers(Map.of("Authorization", "Bearer NEW_TOKEN"))
.build())
.build()
);
System.out.println("Monitor updated: " + updated.getStatus());
Control monitor execution:
String monitorId = monitor.getMonitorId().orElseThrow();
// Pause monitor
client.monitors().disableMonitor(monitorId);
System.out.println("Monitor paused");
// Resume monitor
client.monitors().enableMonitor(monitorId);
System.out.println("Monitor resumed");
Retrieve all monitors for your account:
import com.newscatcher.catchall.types.ListMonitorsResponseDto;
ListMonitorsResponseDto monitors = client.monitors().listMonitors();
System.out.println("Total monitors: " + monitors.getTotalMonitors());
monitors.getMonitors().forEach(m -> {
String status = m.getEnabled() ? "active" : "paused";
System.out.println(String.format("%s: %s", m.getMonitorId(), status));
});
Access aggregated results from all monitor executions:
import com.newscatcher.catchall.resources.monitors.requests.ListMonitorJobsRequest;
import com.newscatcher.catchall.resources.monitors.types.ListMonitorJobsResponse;
import com.newscatcher.catchall.types.PullMonitorResponseDto;
String monitorId = monitor.getMonitorId().orElseThrow();
// List execution history
ListMonitorJobsResponse jobs = client.monitors().listMonitorJobs(
monitorId,
ListMonitorJobsRequest.builder()
.sort("desc")
.build()
);
System.out.println("Monitor executed " + jobs.getTotalJobs() + " jobs");
// Get all collected records
PullMonitorResponseDto results = client.monitors().pullMonitorResults(monitorId);
System.out.println("Total records: " + results.getRecords().orElse(0));
results.getAllRecords().ifPresent(records ->
records.forEach(record -> {
System.out.println(" " + record.getRecordTitle());
System.out.println(" Added: " + record.getAddedOn());
})
);
Complete monitor example
import com.newscatcher.catchall.CatchAllApi;
import com.newscatcher.catchall.resources.monitors.requests.CreateMonitorRequestDto;
import com.newscatcher.catchall.resources.monitors.requests.UpdateMonitorRequestDto;
import com.newscatcher.catchall.resources.monitors.requests.ListMonitorJobsRequest;
import com.newscatcher.catchall.resources.monitors.types.ListMonitorJobsResponse;
import com.newscatcher.catchall.types.CreateMonitorResponseDto;
import com.newscatcher.catchall.types.UpdateMonitorResponseDto;
import com.newscatcher.catchall.types.ListMonitorsResponseDto;
import com.newscatcher.catchall.types.PullMonitorResponseDto;
import com.newscatcher.catchall.types.WebhookDto;
import com.newscatcher.catchall.types.WebhookDtoMethod;
import com.newscatcher.catchall.core.NewscatcherApiApiException;
import java.util.Map;
public class MonitorExample {
public static void main(String[] args) {
CatchAllApi client = CatchAllApi.builder()
.apiKey("YOUR_API_KEY")
.build();
try {
// Create monitor from completed job
String jobId = "af7a26d6-cf0b-458c-a6ed-4b6318c74da3";
CreateMonitorResponseDto monitor = client.monitors().createMonitor(
CreateMonitorRequestDto.builder()
.referenceJobId(jobId)
.schedule("every day at 12 PM UTC")
.webhook(WebhookDto.builder()
.url("https://your-endpoint.com/webhook")
.method(WebhookDtoMethod.POST)
.headers(Map.of("Authorization", "Bearer YOUR_TOKEN"))
.build())
.build()
);
String monitorId = monitor.getMonitorId().orElseThrow();
System.out.println("Monitor created: " + monitorId);
// Update webhook
client.monitors().updateMonitor(
monitorId,
UpdateMonitorRequestDto.builder()
.webhook(WebhookDto.builder()
.url("https://new-endpoint.com/webhook")
.method(WebhookDtoMethod.POST)
.build())
.build()
);
// List all monitors
ListMonitorsResponseDto allMonitors = client.monitors().listMonitors();
allMonitors.getMonitors().forEach(m -> {
String status = m.getEnabled() ? "active" : "paused";
System.out.println(String.format("%s: %s", m.getMonitorId(), status));
});
// Control execution
client.monitors().disableMonitor(monitorId);
client.monitors().enableMonitor(monitorId);
// List execution history
ListMonitorJobsResponse jobs = client.monitors().listMonitorJobs(
monitorId,
ListMonitorJobsRequest.builder().sort("desc").build()
);
System.out.println("\nMonitor executed " + jobs.getTotalJobs() + " jobs");
jobs.getJobs().forEach(job -> {
System.out.println(String.format(
" Job %s: %s to %s",
job.getJobId(),
job.getStartDate(),
job.getEndDate()
));
});
// Get aggregated results
PullMonitorResponseDto results = client.monitors().pullMonitorResults(monitorId);
System.out.println("\nCollected " + results.getRecords().orElse(0) + " total records");
results.getAllRecords().ifPresent(records ->
records.forEach(record -> System.out.println(" " + record.getRecordTitle()))
);
} catch (NewscatcherApiApiException e) {
System.err.println("Status: " + e.statusCode());
System.err.println("Error: " + e.body());
}
}
}
Error handling
Handle API errors with structured exception handling:
import com.newscatcher.catchall.core.NewscatcherApiApiException;
try {
client.jobs().createJob(
SubmitRequestDto.builder()
.query("AI company acquisitions")
.build()
);
} catch (NewscatcherApiApiException e) {
System.err.println("Status: " + e.statusCode());
System.err.println("Error: " + e.body());
}
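Because statusCode() exposes the HTTP status, you can also branch on common cases before deciding whether to retry. A minimal sketch; treating 429 as the rate-limit status is an assumption, not a documented SDK guarantee:
try {
    client.jobs().getJobStatus(jobId);
} catch (NewscatcherApiApiException e) {
    if (e.statusCode() == 429) {
        // Assumed status code for rate limiting; back off before retrying.
        System.err.println("Rate limited, retry after a delay: " + e.body());
    } else if (e.statusCode() >= 500) {
        System.err.println("Server error, consider retrying: " + e.body());
    } else {
        System.err.println("Request failed (" + e.statusCode() + "): " + e.body());
    }
}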
Advanced usage
Pagination
Retrieve large result sets page by page:
import com.newscatcher.catchall.resources.jobs.requests.GetJobResultsRequest;
int page = 1;
while (true) {
var results = client.jobs().getJobResults(
jobId,
GetJobResultsRequest.builder()
.page(page)
.pageSize(100)
.build()
);
System.out.println("Page " + page + "/" + results.getTotalPages().orElse(1));
results.getAllRecords().ifPresent(records ->
records.forEach(record ->
System.out.println(" " + record.getRecordTitle())
)
);
if (results.getPage().orElse(0) >= results.getTotalPages().orElse(1)) {
break;
}
page++;
}
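If you page through results in several places, the loop above can be wrapped in a small helper that hands each record to a callback. A sketch under the same assumptions as the snippet above; the method name and Consumer parameter are illustrative, not part of the SDK:
import java.util.function.Consumer;
static void forEachRecord(CatchAllApi client, String jobId, Consumer<Object> onRecord) {
    int page = 1;
    while (true) {
        var results = client.jobs().getJobResults(
            jobId,
            GetJobResultsRequest.builder().page(page).pageSize(100).build()
        );
        // Pass every record on the current page to the caller.
        results.getAllRecords().ifPresent(records -> records.forEach(onRecord));
        if (results.getPage().orElse(0) >= results.getTotalPages().orElse(1)) {
            break;
        }
        page++;
    }
}
// Usage: forEachRecord(client, jobId, record -> System.out.println(record));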
Timeouts
Configure custom timeouts at the client or request level:
- Client-level
- Request-level
CatchAllApi client = CatchAllApi.builder()
.apiKey("YOUR_API_KEY")
.timeout(30)
.build();
import com.newscatcher.catchall.core.RequestOptions;
client.jobs().createJob(
SubmitRequestDto.builder()
.query("AI company acquisitions")
.build(),
RequestOptions.builder()
.timeout(10)
.build()
);
Retries
Configure automatic retry behavior for failed requests:
- Client-level
- Request-level
CatchAllApi client = CatchAllApi.builder()
.apiKey("YOUR_API_KEY")
.maxRetries(3)
.build();
client.jobs().createJob(
SubmitRequestDto.builder()
.query("AI company acquisitions")
.build(),
RequestOptions.builder()
.maxRetries(3)
.build()
);
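Timeout and retry settings can also be combined, either on the client builder or on a single RequestOptions for one call. For example:
client.jobs().createJob(
    SubmitRequestDto.builder()
        .query("AI company acquisitions")
        .build(),
    RequestOptions.builder()
        .timeout(10)
        .maxRetries(3)
        .build()
);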