Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 7 additions & 3 deletions aciClient/aci.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,10 +167,14 @@ def getJsonPaged(self, uri) -> {}:
page = 0

while True:
parsed_query.extend([('page', page), ('page-size', '50000')])
if page == 0:
parsed_query.extend([('page-size', '50000'), ('page', page)])
else:
parsed_query[-1] = ('page', page)

page += 1
url_to_call = urlunparse((parsed_url[0], parsed_url[1], parsed_url[2], parsed_url[3],
urlencode(parsed_query), parsed_url[5]))
urlencode(parsed_query, safe="|"), parsed_url[5]))
response = self.session.get(url_to_call, verify=False)

if response.ok:
Expand Down Expand Up @@ -253,7 +257,7 @@ def snapshot(self, description="snapshot", target_dn="") -> bool:
self.__logger.error(f'snapshot creation not succesfull: {response}')
return False

# ==============================================================================
# ==============================================================================
# subscribe
# ==============================================================================
def subscribe(
Expand Down
45 changes: 45 additions & 0 deletions aciClient/aciCertClient.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,15 @@
import requests
import json

# The modules are named differently in python2/python3...
try:
from urlparse import urlparse, urlunparse, parse_qsl
from urllib import urlencode
except ImportError:
from urllib.parse import urlparse, urlunparse, urlencode, parse_qsl

requests.packages.urllib3.disable_warnings()


class ACICert:
__logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -58,6 +67,42 @@ def getJson(self, uri) -> {}:
self.__logger.debug(f'Successful get Data from APIC: {r.json()}')
return r.json()['imdata']

# ==============================================================================
# getJson with Pagination
# ==============================================================================
def getJsonPaged(self, uri) -> list:
    """GET all objects for *uri* from the APIC, following query pagination.

    Repeatedly requests pages of up to 50000 objects (APIC `page`/`page-size`
    query parameters) until an empty page is returned, accumulating the
    `imdata` payloads.

    :param uri: request path (and optional query string) relative to baseUrl,
                e.g. 'class/fvTenant.json?query-target-filter=...'
    :return: list of all `imdata` entries across every page
    :raises requests.HTTPError: if any page request returns an HTTP error status
    """
    url = self.baseUrl + uri
    self.__logger.debug(f'Get Json Pagination called url: {url}')
    parsed_url = urlparse(url)
    parsed_query = parse_qsl(parsed_url.query)

    return_data = []
    page = 0

    while True:
        if page == 0:
            # First iteration: append the pagination parameters once.
            # ('page', page) is kept last so later iterations can update it in place.
            parsed_query.extend([('page-size', '50000'), ('page', page)])
        else:
            parsed_query[-1] = ('page', page)

        page += 1
        # Encode the query exactly once: the signed `content` string must match
        # the query actually sent, otherwise APIC certificate auth fails.
        # safe="|" keeps '|' literal, as APIC filter expressions expect.
        query_string = urlencode(parsed_query, safe="|")
        url_to_call = urlunparse((parsed_url[0], parsed_url[1], parsed_url[2], parsed_url[3],
                                  query_string, parsed_url[5]))
        content = f'GET{parsed_url[2]}?{query_string}'
        cookies = self.packCookies(content)
        r = requests.get(url_to_call, cookies=cookies, verify=False)

        # Raise Exception if http Error occurred. Note: after this call r.ok is
        # guaranteed, so no additional status check is needed (the previous
        # `if r.ok:` test here was dead code).
        r.raise_for_status()

        responseJson = r.json()
        self.__logger.debug(f'Successful get Data from APIC: {responseJson}')
        if responseJson['imdata']:
            return_data.extend(responseJson['imdata'])
        else:
            # An empty page means all objects have been fetched.
            return return_data

# ==============================================================================
# postJson
# ==============================================================================
Expand Down