Code Examples
Practical, ready-to-use code examples for common Checkmate API operations in multiple programming languages.
Complete Examples
1. Create Project → Add Tests → Create Run
- Node.js
- Python
const axios = require('axios');

const API_TOKEN = process.env.CHECKMATE_TOKEN;
const BASE_URL = 'http://localhost:3000';

// Shared axios client: every request carries the bearer token and JSON header.
const api = axios.create({
  baseURL: BASE_URL,
  headers: {
    'Authorization': `Bearer ${API_TOKEN}`,
    'Content-Type': 'application/json'
  }
});

/**
 * End-to-end example: create a project, add three test cases, open a run
 * over them, then record a result for each test.
 *
 * @returns {Promise<{projectId: number, testIds: number[], runId: number}>}
 *   IDs of everything that was created.
 * @throws Re-throws any API error after logging the server's response body.
 */
async function completeWorkflow() {
  try {
    // Step 1: a project to hold everything.
    console.log('Creating project...');
    const projectRes = await api.post('/api/v1/project/create', {
      projectName: 'Mobile App Testing',
      description: 'Test suite for iOS and Android',
      orgId: 1
    });
    const projectId = projectRes.data.data.projectId;
    console.log(`✓ Project created with ID: ${projectId}`);

    // Step 2: create the test cases one at a time, so the collected IDs
    // keep the same order as the definitions below (step 4 relies on it).
    console.log('\nCreating test cases...');
    const caseDefinitions = [
      {
        title: 'Verify user login',
        description: 'Test login functionality',
        steps: '1. Open app\n2. Enter credentials\n3. Tap login',
        expectedResult: 'User logged in successfully'
      },
      {
        title: 'Verify profile update',
        description: 'Test profile editing',
        steps: '1. Navigate to profile\n2. Update details\n3. Save',
        expectedResult: 'Profile updated successfully'
      },
      {
        title: 'Verify logout',
        description: 'Test logout functionality',
        steps: '1. Tap logout\n2. Confirm',
        expectedResult: 'User logged out'
      }
    ];
    const testIds = [];
    for (let i = 0; i < caseDefinitions.length; i += 1) {
      const definition = caseDefinitions[i];
      const createRes = await api.post('/api/v1/test/create', {
        ...definition,
        projectId,
        sectionId: 1, // Adjust based on your setup
        priorityId: 2,
        automationStatusId: 1
      });
      testIds.push(createRes.data.data.testId);
      console.log(`✓ Created test: ${definition.title}`);
    }

    // Step 3: a run that includes every test created above.
    console.log('\nCreating test run...');
    const runRes = await api.post('/api/v1/run/create', {
      runName: 'Sprint 1 Regression',
      projectId,
      testIds
    });
    const runId = runRes.data.data.runId;
    console.log(`✓ Run created with ID: ${runId}`);

    // Step 4: record a status for each test in the run.
    console.log('\nUpdating test statuses...');
    await api.put('/api/v1/run/update-test-status', {
      runId,
      testIdStatusArray: [
        { testId: testIds[0], status: 'Passed' },
        { testId: testIds[1], status: 'Failed' },
        { testId: testIds[2], status: 'Passed' }
      ],
      comment: 'Initial test execution'
    });
    console.log('✓ Test statuses updated');

    console.log('\n✅ Workflow completed successfully!');
    return { projectId, testIds, runId };
  } catch (error) {
    console.error('Error:', error.response?.data || error.message);
    throw error;
  }
}

// Run the workflow
completeWorkflow()
  .then(result => console.log('\nResult:', result))
  .catch(err => console.error('Failed:', err));
import requests
import os
from typing import List, Dict
API_TOKEN = os.getenv('CHECKMATE_TOKEN')
BASE_URL = 'http://localhost:3000'
class CheckmateAPI:
    """Minimal client for the Checkmate REST endpoints used in this example.

    A single ``requests.Session`` carries the bearer token and JSON
    content-type header on every call.
    """

    def __init__(self, token: str, base_url: str):
        self.session = requests.Session()
        self.session.headers.update({
            'Authorization': f'Bearer {token}',
            'Content-Type': 'application/json',
        })
        self.base_url = base_url

    def create_project(self, name: str, description: str, org_id: int) -> int:
        """Create a new project and return its numeric ID."""
        payload = {
            'projectName': name,
            'description': description,
            'orgId': org_id,
        }
        resp = self.session.post(f'{self.base_url}/api/v1/project/create', json=payload)
        resp.raise_for_status()
        return resp.json()['data']['projectId']

    def create_test(self, project_id: int, test_data: Dict) -> int:
        """Create a test case in the given project and return its ID.

        ``test_data`` supplies title/description/steps; fixed section,
        priority and automation-status IDs are merged in here.
        """
        body = dict(test_data)
        body.update({
            'projectId': project_id,
            'sectionId': 1,  # Adjust as needed
            'priorityId': 2,
            'automationStatusId': 1,
        })
        resp = self.session.post(f'{self.base_url}/api/v1/test/create', json=body)
        resp.raise_for_status()
        return resp.json()['data']['testId']

    def create_run(self, project_id: int, run_name: str, test_ids: List[int]) -> int:
        """Create a test run over ``test_ids`` and return the run ID."""
        payload = {
            'runName': run_name,
            'projectId': project_id,
            'testIds': test_ids,
        }
        resp = self.session.post(f'{self.base_url}/api/v1/run/create', json=payload)
        resp.raise_for_status()
        return resp.json()['data']['runId']

    def update_test_statuses(self, run_id: int, statuses: List[Dict], comment: str = ''):
        """Record per-test statuses in a run; returns the decoded response."""
        payload = {
            'runId': run_id,
            'testIdStatusArray': statuses,
            'comment': comment,
        }
        resp = self.session.put(f'{self.base_url}/api/v1/run/update-test-status', json=payload)
        resp.raise_for_status()
        return resp.json()
def main():
    """Run the full example workflow: project -> tests -> run -> statuses.

    Returns:
        dict with the created ``projectId``, ``testIds`` and ``runId``.

    Raises:
        requests.HTTPError: propagated from the client on any API failure.
    """
    # Initialize API client
    api = CheckmateAPI(API_TOKEN, BASE_URL)

    # 1. Create project
    print('Creating project...')
    project_id = api.create_project(
        name='Mobile App Testing',
        description='Test suite for iOS and Android',
        org_id=1
    )
    print(f'✓ Project created with ID: {project_id}')

    # 2. Create test cases (sequentially, so test_ids keeps definition order —
    # the status updates in step 4 index into it positionally)
    print('\nCreating test cases...')
    tests = [
        {
            'title': 'Verify user login',
            'description': 'Test login functionality',
            'steps': '1. Open app\n2. Enter credentials\n3. Tap login',
            'expectedResult': 'User logged in successfully'
        },
        {
            'title': 'Verify profile update',
            'description': 'Test profile editing',
            'steps': '1. Navigate to profile\n2. Update details\n3. Save',
            'expectedResult': 'Profile updated successfully'
        },
        {
            'title': 'Verify logout',
            'description': 'Test logout functionality',
            'steps': '1. Tap logout\n2. Confirm',
            'expectedResult': 'User logged out'
        }
    ]
    test_ids = []
    for test in tests:
        test_id = api.create_test(project_id, test)
        test_ids.append(test_id)
        print(f'✓ Created test: {test["title"]}')

    # 3. Create test run
    print('\nCreating test run...')
    run_id = api.create_run(project_id, 'Sprint 1 Regression', test_ids)
    print(f'✓ Run created with ID: {run_id}')

    # 4. Update test statuses
    print('\nUpdating test statuses...')
    api.update_test_statuses(
        run_id=run_id,
        statuses=[
            {'testId': test_ids[0], 'status': 'Passed'},
            {'testId': test_ids[1], 'status': 'Failed'},
            {'testId': test_ids[2], 'status': 'Passed'}
        ],
        comment='Initial test execution'
    )
    print('✓ Test statuses updated')
    print('\n✅ Workflow completed successfully!')
    return {'projectId': project_id, 'testIds': test_ids, 'runId': run_id}


if __name__ == '__main__':
    import sys

    try:
        result = main()
        print(f'\nResult: {result}')
    except Exception as e:
        # Fix: report failures on stderr and exit non-zero so shells and CI
        # pipelines can detect that the workflow did not complete (the
        # original printed to stdout and exited with status 0 on error).
        print(f'Error: {e}', file=sys.stderr)
        sys.exit(1)
2. Bulk Test Import from CSV
- Python
- Node.js
- cURL
import requests
import csv
import os
API_TOKEN = os.getenv('CHECKMATE_TOKEN')
BASE_URL = 'http://localhost:3000'
def bulk_import_tests(csv_file_path: str, project_id: int):
    """Upload a CSV of test cases into a project.

    Sends the file as multipart/form-data to the bulk-import endpoint.
    Returns the decoded response on success (HTTP 201), ``None`` otherwise.
    """
    auth_headers = {'Authorization': f'Bearer {API_TOKEN}'}

    # Stream the CSV straight from disk as the 'file' form field.
    with open(csv_file_path, 'rb') as fh:
        response = requests.post(
            f'{BASE_URL}/api/v1/tests/upload',
            headers=auth_headers,
            files={'file': ('tests.csv', fh, 'text/csv')},
            data={'projectId': project_id},
        )

    if response.status_code != 201:
        print(f"❌ Import failed: {response.json()}")
        return None

    result = response.json()
    print(f"✅ Successfully imported {len(result['data']['created'])} tests")
    return result


# Example usage
bulk_import_tests('tests.csv', project_id=1)
const axios = require('axios');
const FormData = require('form-data');
const fs = require('fs');

const API_TOKEN = process.env.CHECKMATE_TOKEN;
const BASE_URL = 'http://localhost:3000';

/**
 * Upload a CSV of test cases into a project as multipart/form-data.
 *
 * @param {string} csvFilePath - Path to the CSV file on disk.
 * @param {number} projectId - Target project ID.
 * @returns {Promise<object>} Decoded API response.
 * @throws Re-throws the API error after logging the response body.
 */
async function bulkImportTests(csvFilePath, projectId) {
  try {
    // The CSV is streamed from disk; form.getHeaders() supplies the
    // multipart boundary header axios needs.
    const form = new FormData();
    form.append('file', fs.createReadStream(csvFilePath));
    form.append('projectId', projectId);

    const uploadHeaders = {
      'Authorization': `Bearer ${API_TOKEN}`,
      ...form.getHeaders()
    };
    const response = await axios.post(`${BASE_URL}/api/v1/tests/upload`, form, {
      headers: uploadHeaders
    });

    console.log(`✅ Successfully imported ${response.data.data.created.length} tests`);
    return response.data;
  } catch (error) {
    console.error('❌ Import failed:', error.response?.data || error.message);
    throw error;
  }
}

// Example usage
bulkImportTests('tests.csv', 1)
  .then(result => console.log('Result:', result))
  .catch(err => console.error('Error:', err));
#!/bin/bash
# Bulk-import test cases from a CSV file via the Checkmate upload endpoint.

API_TOKEN="your_token_here"        # Replace with a real API token
BASE_URL="http://localhost:3000"
CSV_FILE="tests.csv"               # CSV to upload (see "CSV Format" below)
PROJECT_ID=1                       # Target project

# multipart/form-data POST: the CSV as "file", the target project as "projectId"
curl -X POST "${BASE_URL}/api/v1/tests/upload" \
  -H "Authorization: Bearer ${API_TOKEN}" \
  -F "file=@${CSV_FILE}" \
  -F "projectId=${PROJECT_ID}"
CSV Format:
title,description,sectionId,priorityId,automationStatusId,steps,expectedResult
"User Login Test","Verify user can login",5,1,2,"1. Open app\n2. Enter credentials\n3. Click login","User successfully logged in"
"Profile Edit Test","Verify profile editing",5,2,1,"1. Go to profile\n2. Edit details\n3. Save","Profile updated"
3. Generate Daily Test Report
- Python
import requests
import os
from datetime import datetime, timedelta
import csv
API_TOKEN = os.getenv('CHECKMATE_TOKEN')
BASE_URL = 'http://localhost:3000'
def generate_daily_report(project_id: int, output_file: str = None):
    """Generate a daily test execution report.

    Prints per-run pass/fail statistics for every run created in the last
    24 hours and optionally writes the same rows to a CSV file.

    Args:
        project_id: Project whose runs are summarised.
        output_file: Optional path of a CSV file to write the report to.

    Returns:
        List of per-run summary dicts (empty if there were no recent runs).

    Raises:
        requests.HTTPError: if any API call fails.
    """
    from datetime import timezone  # local import keeps the snippet self-contained

    headers = {
        'Authorization': f'Bearer {API_TOKEN}',
        'Content-Type': 'application/json'
    }

    # Get all runs for the project
    response = requests.get(
        f'{BASE_URL}/api/v1/project/runs',
        headers=headers,
        params={'projectId': project_id, 'page': 1, 'pageSize': 100}
    )
    response.raise_for_status()  # surface auth/URL problems immediately
    runs = response.json()['data']['runs']

    # Filter runs from the last 24 hours.
    # Bug fix: 'createdOn' parses to a timezone-AWARE datetime (the 'Z' is
    # rewritten to '+00:00'), so the reference point must be aware as well —
    # comparing aware vs naive datetimes raises TypeError.
    yesterday = datetime.now(timezone.utc) - timedelta(days=1)
    recent_runs = [
        run for run in runs
        if datetime.fromisoformat(run['createdOn'].replace('Z', '+00:00')) > yesterday
    ]

    print(f"\n📊 Daily Report for Project {project_id}")
    print(f"Date: {datetime.now().strftime('%Y-%m-%d')}")
    print(f"{'='*60}\n")

    report_data = []
    for run in recent_runs:
        # Get run details (all tests in one page)
        run_response = requests.get(
            f'{BASE_URL}/api/v1/run',
            headers=headers,
            params={'runId': run['runId'], 'page': 1, 'pageSize': 1000}
        )
        run_response.raise_for_status()
        run_details = run_response.json()['data']

        # Calculate statistics
        total_tests = run_details['totalCount']
        passed = sum(1 for t in run_details['tests'] if t.get('status') == 'Passed')
        failed = sum(1 for t in run_details['tests'] if t.get('status') == 'Failed')
        blocked = sum(1 for t in run_details['tests'] if t.get('status') == 'Blocked')
        untested = sum(1 for t in run_details['tests'] if t.get('status') == 'Untested')
        pass_rate = (passed / total_tests * 100) if total_tests > 0 else 0

        print(f"Run: {run['runName']}")
        print(f" Total Tests: {total_tests}")
        print(f" ✓ Passed: {passed} ({pass_rate:.1f}%)")
        print(f" ✗ Failed: {failed}")
        print(f" ⊘ Blocked: {blocked}")
        print(f" ○ Untested: {untested}")
        print(f" Created By: {run['createdBy']}")
        print(f" Status: {'🔒 Locked' if run['isLocked'] else '🔓 Active'}\n")

        report_data.append({
            'Run Name': run['runName'],
            'Total Tests': total_tests,
            'Passed': passed,
            'Failed': failed,
            'Blocked': blocked,
            'Untested': untested,
            'Pass Rate %': f"{pass_rate:.1f}",
            'Created By': run['createdBy'],
            'Created On': run['createdOn'],
            'Locked': 'Yes' if run['isLocked'] else 'No'
        })

    # Save to CSV if output file specified (newline='' is the csv-module
    # requirement to avoid blank lines on Windows)
    if output_file and report_data:
        with open(output_file, 'w', newline='') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=report_data[0].keys())
            writer.writeheader()
            writer.writerows(report_data)
        print(f"📄 Report saved to: {output_file}")

    return report_data


# Example usage
generate_daily_report(project_id=1, output_file='daily_report.csv')
4. Sync Tests from Selenium/Pytest
- Python + Pytest
import pytest
import requests
import os
import inspect
API_TOKEN = os.getenv('CHECKMATE_TOKEN')
BASE_URL = 'http://localhost:3000'
PROJECT_ID = 1
class CheckmateSyncPlugin:
    """Pytest plugin to sync tests with Checkmate.

    Lifecycle: after pytest finishes collection, every collected item is
    looked up (or created) in Checkmate and remembered in ``test_mapping``;
    after each test executes, its outcome is forwarded via
    ``update_test_result``.
    """

    def __init__(self):
        # Auth + content-type headers reused for every Checkmate request.
        self.api_headers = {
            'Authorization': f'Bearer {API_TOKEN}',
            'Content-Type': 'application/json'
        }
        self.test_mapping = {}  # Maps pytest test names to Checkmate test IDs

    def pytest_collection_finish(self, session):
        """Sync test cases after collection.

        Pytest calls this hook once, when collection is complete; each
        collected item is matched to (or created as) a Checkmate test.
        """
        print("\n🔄 Syncing tests with Checkmate...")
        for item in session.items:
            # Extract test information
            test_name = item.name
            test_doc = inspect.getdoc(item.function) or "No description"
            # NOTE(review): item.fspath / session.startdir are legacy py.path
            # attributes, deprecated in recent pytest — confirm the pytest
            # version this example targets.
            test_file = item.fspath.relto(session.startdir)
            # Check if test exists in Checkmate
            test_id = self.find_or_create_test(test_name, test_doc, test_file)
            self.test_mapping[test_name] = test_id
        print(f"✅ Synced {len(self.test_mapping)} tests")

    def find_or_create_test(self, title, description, link):
        """Find existing test or create new one.

        Returns a Checkmate test ID. Matching is by exact title among the
        text-search results; note that neither request here checks the HTTP
        status, so an API error surfaces as a KeyError on the response body.
        """
        # Search for existing test
        search_response = requests.get(
            f'{BASE_URL}/api/v1/project/tests',
            headers=self.api_headers,
            params={
                'projectId': PROJECT_ID,
                'page': 1,
                'pageSize': 1000,
                'textSearch': title
            }
        )
        tests = search_response.json()['data']['tests']
        existing = next((t for t in tests if t['title'] == title), None)
        if existing:
            return existing['testId']
        # Create new test
        create_response = requests.post(
            f'{BASE_URL}/api/v1/test/create',
            headers=self.api_headers,
            json={
                'title': title,
                'description': description,
                'projectId': PROJECT_ID,
                'sectionId': 1,
                'priorityId': 2,
                'automationStatusId': 2,  # Automated
                'link': link
            }
        )
        return create_response.json()['data']['testId']

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_makereport(self, item, call):
        """Update test status after execution.

        hookwrapper style: the ``yield`` hands control to pytest's own
        report construction, and the finished report is inspected afterwards.
        Only the 'call' phase (the test body, not setup/teardown) is recorded.
        """
        outcome = yield
        report = outcome.get_result()
        if report.when == 'call':
            test_name = item.name
            test_id = self.test_mapping.get(test_name)
            if test_id:
                status = 'Passed' if report.passed else 'Failed'
                # On failure, the longrepr (traceback text) becomes the comment.
                self.update_test_result(test_id, status, str(report.longrepr) if report.failed else '')

    def update_test_result(self, test_id, status, comment):
        """Update test result in Checkmate"""
        # This would typically be done in the context of a run
        # For simplicity, this is a placeholder
        pass
# Register the plugin
def pytest_configure(config):
    """Pytest hook: install the Checkmate sync plugin at startup."""
    plugin = CheckmateSyncPlugin()
    config.pluginmanager.register(plugin)
# Example tests — placeholders showing where real Selenium/pytest logic goes.
def test_user_login():
    """Verify user can login with valid credentials"""
    # Replace with real login steps; the trivial assertion keeps the
    # example green.
    assert True


def test_user_logout():
    """Verify user can logout successfully"""
    # Replace with real logout steps.
    assert True
5. CI/CD Integration (GitHub Actions)
- GitHub Actions
# .github/workflows/test-execution.yml
# CI pipeline: open a Checkmate run, execute pytest, push results back,
# then lock the run — the last two steps run even when tests fail.
name: Test Execution

on:
  push:
    branches: [ main, develop ]
  pull_request:
    branches: [ main ]
  schedule:
    - cron: '0 9 * * 1-5' # Run weekdays at 9 AM

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: |
          pip install -r requirements.txt

      # Creates the run up-front and exposes its ID to later steps
      # via the step output `run_id`.
      - name: Create Checkmate Run
        id: create_run
        env:
          CHECKMATE_TOKEN: ${{ secrets.CHECKMATE_API_TOKEN }}
          CHECKMATE_URL: ${{ secrets.CHECKMATE_URL }}
        run: |
          RUN_ID=$(python scripts/create_run.py)
          echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT

      - name: Run tests
        run: |
          pytest tests/ --json-report --json-report-file=test_results.json

      # `if: always()` — report results even when the pytest step failed.
      - name: Update Checkmate Results
        if: always()
        env:
          CHECKMATE_TOKEN: ${{ secrets.CHECKMATE_API_TOKEN }}
          CHECKMATE_URL: ${{ secrets.CHECKMATE_URL }}
          RUN_ID: ${{ steps.create_run.outputs.run_id }}
        run: |
          python scripts/update_results.py

      - name: Lock Run
        if: always()
        env:
          CHECKMATE_TOKEN: ${{ secrets.CHECKMATE_API_TOKEN }}
          CHECKMATE_URL: ${{ secrets.CHECKMATE_URL }}
          RUN_ID: ${{ steps.create_run.outputs.run_id }}
        run: |
          python scripts/lock_run.py
scripts/create_run.py:
"""CI helper: create a Checkmate run over every test in the project and
print the new run ID to stdout (captured into $GITHUB_OUTPUT by the
workflow)."""
import requests
import os
from datetime import datetime

API_TOKEN = os.getenv('CHECKMATE_TOKEN')
BASE_URL = os.getenv('CHECKMATE_URL')
PROJECT_ID = 1

# Get all test IDs for the project
response = requests.get(
    f'{BASE_URL}/api/v1/project/tests',
    headers={'Authorization': f'Bearer {API_TOKEN}'},
    params={'projectId': PROJECT_ID, 'page': 1, 'pageSize': 1000}
)
# Fix: fail loudly on an API error instead of emitting a KeyError traceback
# (or garbage) into the step output that later steps parse as the run ID.
response.raise_for_status()
test_ids = [test['testId'] for test in response.json()['data']['tests']]

# Create run, named with a timestamp so CI runs stay distinguishable
run_name = f"CI Run - {datetime.now().strftime('%Y-%m-%d %H:%M')}"
run_response = requests.post(
    f'{BASE_URL}/api/v1/run/create',
    headers={
        'Authorization': f'Bearer {API_TOKEN}',
        'Content-Type': 'application/json'
    },
    json={
        'runName': run_name,
        'projectId': PROJECT_ID,
        'testIds': test_ids
    }
)
run_response.raise_for_status()
run_id = run_response.json()['data']['runId']
print(run_id)  # Output to stdout for GitHub Actions
scripts/update_results.py:
"""CI helper: read the pytest JSON report and push per-test statuses into
the Checkmate run whose ID arrives in the RUN_ID environment variable."""
import requests
import os
import json

API_TOKEN = os.getenv('CHECKMATE_TOKEN')
BASE_URL = os.getenv('CHECKMATE_URL')
RUN_ID = os.getenv('RUN_ID')

# Read test results produced by pytest --json-report
with open('test_results.json') as f:
    results = json.load(f)

# Map pytest results to Checkmate statuses: anything other than a clean
# pass (failed, error, etc.) is reported as 'Failed'.
status_updates = []
for test in results['tests']:
    status = 'Passed' if test['outcome'] == 'passed' else 'Failed'
    # You would need to map test names to test IDs
    # This is simplified for demonstration
    test_id = test.get('checkmate_id')  # Assuming you store this
    if test_id:
        status_updates.append({
            'testId': test_id,
            'status': status
        })

# Update statuses
response = requests.put(
    f'{BASE_URL}/api/v1/run/update-test-status',
    headers={
        'Authorization': f'Bearer {API_TOKEN}',
        'Content-Type': 'application/json'
    },
    json={
        'runId': int(RUN_ID),
        'testIdStatusArray': status_updates,
        # Fix: plain string — the original used an f-string with no
        # placeholders (a lint error, though the output was identical).
        'comment': 'Automated execution via CI/CD'
    }
)
# Fix: surface API failures in the CI log instead of silently printing success.
response.raise_for_status()
print(f"✅ Updated {len(status_updates)} test statuses")
Quick Snippets
Get Test Statistics
def get_test_statistics(project_id):
    """Fetch all tests for a project and tally them by priority,
    automation status, and section.

    Returns a dict with a 'total' count plus one histogram per grouping.
    """
    response = requests.get(
        f'{BASE_URL}/api/v1/project/tests',
        headers={'Authorization': f'Bearer {API_TOKEN}'},
        params={'projectId': project_id, 'page': 1, 'pageSize': 10000}
    )
    tests = response.json()['data']['tests']

    stats = {
        'total': len(tests),
        'by_priority': {},
        'by_automation': {},
        'by_section': {}
    }

    # (histogram key in stats, field name on each test record)
    groupings = [
        ('by_priority', 'priorityName'),
        ('by_automation', 'automationStatusName'),
        ('by_section', 'sectionName'),
    ]
    for test in tests:
        for bucket, field in groupings:
            label = test.get(field, 'Unknown')
            stats[bucket][label] = stats[bucket].get(label, 0) + 1
    return stats
Bulk Update Test Priority
/**
 * Set a new priority on a batch of tests in one bulk-update call.
 * Relies on a pre-configured `api` axios instance being in scope.
 *
 * @param {number[]} testIds - IDs of the tests to change.
 * @param {number} newPriorityId - Priority to apply to all of them.
 * @returns {Promise<object>} The axios response.
 */
async function updateTestPriorities(testIds, newPriorityId) {
  const payload = {
    testIds,
    propertiesToUpdate: { priorityId: newPriorityId }
  };
  return await api.put('/api/v1/tests/update', payload);
}

// Usage
await updateTestPriorities([101, 102, 103], 1); // Set to Critical
Download and Parse Test Report
def download_and_analyze_report(project_id):
    """Download a project's tests as CSV, save to 'tests.csv', and print
    total / automated / manual counts based on the 'Automation Status' column.

    Raises:
        requests.HTTPError: if the download request fails.
    """
    import csv  # local import, keeping the snippet self-contained

    # Download report
    response = requests.get(
        f'{BASE_URL}/api/v1/project/download-tests',
        headers={'Authorization': f'Bearer {API_TOKEN}'},
        params={'projectId': project_id}
    )
    # Fix: don't save an API error page to disk and then "analyze" it.
    response.raise_for_status()

    # Save to file
    with open('tests.csv', 'wb') as f:
        f.write(response.content)

    # Parse and analyze. newline='' is required by the csv module so that
    # newlines embedded in quoted fields are handled correctly.
    with open('tests.csv', 'r', newline='') as f:
        reader = csv.DictReader(f)
        tests = list(reader)

    print(f"Total Tests: {len(tests)}")
    print(f"Automated: {sum(1 for t in tests if 'Automated' in t.get('Automation Status', ''))}")
    print(f"Manual: {sum(1 for t in tests if 'Manual' in t.get('Automation Status', ''))}")
Next Steps
tip
Have a use case not covered here? Open an issue or join our Discord!