Skip to content

Commit a7d116d

Browse files
authored
Rework client wrapper and specific exceptions (#1)
1 parent 537bea2 commit a7d116d

File tree

11 files changed

+1119
-747
lines changed

11 files changed

+1119
-747
lines changed

.pre-commit-config.yaml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
repos:
22
- repo: https://github.com/pre-commit/pre-commit-hooks
3-
rev: v4.4.0
3+
rev: v4.5.0
44
hooks:
55
- id: end-of-file-fixer
66
- id: trailing-whitespace
@@ -16,18 +16,18 @@ repos:
1616
- id: fix-byte-order-marker
1717

1818
- repo: https://github.com/asottile/pyupgrade
19-
rev: v3.10.1
19+
rev: v3.15.1
2020
hooks:
2121
- id: pyupgrade
2222
args: [ "--py38-plus", "--keep-runtime-typing" ]
2323

2424
- repo: https://github.com/psf/black
25-
rev: 23.7.0
25+
rev: 24.2.0
2626
hooks:
2727
- id: black
2828

2929
- repo: https://github.com/asottile/blacken-docs
30-
rev: 1.15.0
30+
rev: 1.16.0
3131
hooks:
3232
- id: blacken-docs
3333
additional_dependencies: [ black ]
@@ -37,7 +37,7 @@ repos:
3737
hooks:
3838
- id: seed-isort-config
3939
- repo: https://github.com/pycqa/isort
40-
rev: 5.12.0
40+
rev: 5.13.2
4141
hooks:
4242
- id: isort
4343
args: [ "--profile", "black", "--filter-files" ]

LICENSE

Lines changed: 201 additions & 164 deletions
Large diffs are not rendered by default.

README.rst

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -2,15 +2,22 @@
22
Kafka schema registry admin
33
===========================
44

5-
Pure HTTP client library (using requests) to manipulate schemas and definitions into Schema Registry"
5+
Simple, lightweight HTTP client library (using requests) to manipulate schemas and definitions in a Schema Registry.
66

7-
API specification is documented `here <https://docs.confluent.io/platform/current/schema-registry/develop/api.html#overview>`__
7+
* Confluent API specification is documented `here <https://docs.confluent.io/platform/current/schema-registry/develop/api.html#overview>`__
88

9+
* RedPanda API specification is documented `here <https://docs.redpanda.com/current/manage/schema-reg/schema-reg-api/>`__
910

10-
Credits
11-
-------
1211

13-
This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
12+
Usage
13+
======
1414

15-
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
16-
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
15+
Very simple example to manipulate the schema registry and its resources.
16+
17+
.. code-block:: python
18+
19+
from kafka_schema_registry_admin import SchemaRegistry
20+
21+
registry = SchemaRegistry("http://localhost:8081")
22+
subjects = registry.get_all_subjects()
23+
schemas = registry.get_all_schemas()

docker-compose.yml

Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,100 @@
1+
---
2+
version: '2'
3+
services:
4+
broker:
5+
image: apache/kafka:3.7.0
6+
hostname: broker
7+
container_name: broker
8+
ports:
9+
- "9092:9092"
10+
environment:
11+
KAFKA_NODE_ID: 1
12+
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
13+
KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT_HOST://localhost:9092,PLAINTEXT://broker:19092'
14+
KAFKA_PROCESS_ROLES: 'broker,controller'
15+
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@broker:29093'
16+
KAFKA_LISTENERS: 'CONTROLLER://:29093,PLAINTEXT_HOST://:9092,PLAINTEXT://:19092'
17+
KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
18+
KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
19+
CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
20+
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
21+
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
22+
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
23+
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
24+
KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
25+
26+
schema-registry:
27+
image: confluentinc/cp-schema-registry:7.5.0
28+
hostname: schema-registry
29+
container_name: schema-registry
30+
depends_on:
31+
- broker
32+
ports:
33+
- "8081:8081"
34+
environment:
35+
SCHEMA_REGISTRY_HOST_NAME: schema-registry
36+
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:19092'
37+
SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
38+
39+
connect:
40+
image: cricketeerone/apache-kafka-connect
41+
hostname: connect
42+
container_name: connect
43+
depends_on:
44+
- broker
45+
- schema-registry
46+
ports:
47+
- "8083:8083"
48+
environment:
49+
CONNECT_BOOTSTRAP_SERVERS: 'broker:19092'
50+
CONNECT_REST_ADVERTISED_HOST_NAME: connect
51+
CONNECT_GROUP_ID: compose-connect-group
52+
CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
53+
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
54+
CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
55+
CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
56+
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
57+
CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
58+
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
59+
CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
60+
CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
61+
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
62+
CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
63+
CONNECT_LOG4J_LOGGERS: org.reflections=ERROR
64+
65+
postgresql:
66+
image: postgres:14
67+
hostname: postgresql
68+
volumes:
69+
- pg_data:/var/lib/postgresql/data
70+
environment:
71+
POSTGRES_DB: "conduktor-console"
72+
POSTGRES_USER: "conduktor"
73+
POSTGRES_PASSWORD: "change_me"
74+
POSTGRES_HOST_AUTH_METHOD: "scram-sha-256"
75+
76+
conduktor-console:
77+
image: conduktor/conduktor-console:1.21.1
78+
depends_on:
79+
- postgresql
80+
- broker
81+
ports:
82+
- "8080:8080"
83+
volumes:
84+
- conduktor_data:/var/conduktor
85+
environment:
86+
CDK_DATABASE_URL: "postgresql://conduktor:change_me@postgresql:5432/conduktor-console"
87+
CDK_MONITORING_CORTEX-URL: http://conduktor-monitoring:9009/
88+
CDK_MONITORING_ALERT-MANAGER-URL: http://conduktor-monitoring:9010/
89+
CDK_MONITORING_CALLBACK-URL: http://conduktor-platform:8080/monitoring/api/
90+
CDK_MONITORING_NOTIFICATIONS-CALLBACK-URL: http://localhost:8080
91+
92+
conduktor-monitoring:
93+
image: conduktor/conduktor-console-cortex:1.21.1
94+
environment:
95+
CDK_CONSOLE-URL: "http://conduktor-console:8080"
96+
depends_on:
97+
- conduktor-console
98+
volumes:
99+
pg_data: {}
100+
conduktor_data: {}
Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,12 @@
1-
# SPDX-License-Identifier: LGPL-3.0-only
1+
# SPDX-License-Identifier: Apache-2.0
22
# Copyright 2021 John Mille <john@ews-network.net>
33

44
"""Top-level package for Kafka schema registry admin."""
55

66
__author__ = """JohnPreston"""
77
__email__ = "john@ews-network.net"
88
__version__ = "0.2.6"
9+
10+
from .kafka_schema_registry_admin import SchemaRegistry
11+
12+
__all__ = ["SchemaRegistry"]
Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
# SPDX-License-Identifier: Apache-2.0
2+
# Copyright 2021 John Mille <john@ews-network.net>
3+
4+
from __future__ import annotations
5+
6+
from typing import TYPE_CHECKING
7+
8+
if TYPE_CHECKING:
9+
from requests import Response
10+
11+
from urllib.parse import urlparse
12+
13+
import requests
14+
15+
from .errors import evaluate_api_return
16+
17+
18+
class Client:
    """API Client wrapper around requests.

    Prefixes every request path with the registry base URL, injects default
    headers, and routes each call through ``evaluate_api_return`` so that
    non-2xx responses are translated into the library's specific exceptions.
    """

    def __init__(self, base_url: str):
        """
        :param base_url: Root URL of the schema registry,
            e.g. ``http://localhost:8081``
        """
        self._base_url = base_url

        # Headers applied to every request unless overridden by the caller.
        self._default_headers: dict = {
            "Accept": "application/json",
        }
        # Additional headers for requests that carry a schema payload.
        self._post_headers: dict = {
            "Accept": "application/json",
            "Content-Type": "application/vnd.schemaregistry.v1+json",
        }

    def _prepare_request(
        self, api_path: str, kwargs: dict, use_post_headers: bool = False
    ) -> str:
        """Merge default headers into ``kwargs`` and build the full URL.

        Caller-supplied headers take precedence over the defaults, so a custom
        ``Accept`` or ``Content-Type`` is not silently clobbered (the previous
        per-method logic overwrote caller headers with the defaults, and
        raised ``AttributeError`` when a caller passed ``headers=None``).

        :param api_path: Path appended to the base URL.
        :param kwargs: Keyword arguments destined for ``requests``; mutated
            in place to carry the merged ``headers`` mapping.
        :param use_post_headers: Also apply the schema-registry content-type
            headers (used by ``post``).
        :return: The normalized request URL.
        """
        headers: dict = dict(self._default_headers)
        if use_post_headers:
            headers.update(self._post_headers)
        # ``or {}`` guards against an explicit ``headers=None`` from callers.
        headers.update(kwargs.get("headers") or {})
        kwargs["headers"] = headers
        return urlparse(self._base_url + api_path).geturl()

    @evaluate_api_return
    def get(self, api_path: str, *args, **kwargs) -> Response:
        """HTTP GET against ``api_path`` on the registry."""
        url: str = self._prepare_request(api_path, kwargs)
        return requests.get(url, *args, **kwargs)

    @evaluate_api_return
    def post(self, api_path: str, *args, **kwargs) -> Response:
        """HTTP POST against ``api_path``, sending schema content-type headers."""
        url: str = self._prepare_request(api_path, kwargs, use_post_headers=True)
        return requests.post(url, *args, **kwargs)

    @evaluate_api_return
    def put(self, api_path: str, *args, **kwargs) -> Response:
        """HTTP PUT against ``api_path`` on the registry."""
        url: str = self._prepare_request(api_path, kwargs)
        return requests.put(url, *args, **kwargs)

    @evaluate_api_return
    def delete(self, api_path: str, *args, **kwargs) -> Response:
        """HTTP DELETE against ``api_path`` on the registry."""
        url: str = self._prepare_request(api_path, kwargs)
        return requests.delete(url, *args, **kwargs)

0 commit comments

Comments
 (0)