8 changes: 4 additions & 4 deletions README.md
@@ -42,8 +42,8 @@ Once you have installed the pre-release version of PyCentral, you need to obtain

#### 1. **New Central Authentication**
For New Central, you must obtain the following details before making API requests:
- **Base URL**: This is the API Gateway URL for your New Central account based on the geographical cluster of your account on the HPE GreenLake Platform. You can find the base URL of your New Central account's API Gateway from the table [here](https://developer.arubanetworks.com/new-hpe-anw-central/docs/getting-started-with-rest-apis#base-urls).
- **Client ID and Client Secret**: These credentials are required to generate an access token to authenticate API requests. You can obtain them by creating a Personal API Client for your New Central Account. Follow the detailed steps in the [Create Client Credentials documentation](https://developer.arubanetworks.com/new-hpe-anw-central/docs/generating-and-managing-access-tokens#create-client-credentials).
- **Base URL or Cluster Name**: Base URL is the API Gateway URL for your New Central account based on the geographical cluster of your account on the HPE GreenLake Platform. You can find the base URL or cluster name of your New Central account's API Gateway from the table [here](https://developer.arubanetworks.com/new-central/docs/getting-started-with-rest-apis#base-urls).
- **Client ID and Client Secret**: These credentials are required to generate an access token to authenticate API requests. You can obtain them by creating a Personal API Client for your New Central Account. Follow the detailed steps in the [Create Client Credentials documentation](https://developer.arubanetworks.com/new-central/docs/generating-and-managing-access-tokens#create-client-credentials).
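Once you have these details, a minimal sketch of how they are typically wired into the SDK looks like the following. The dictionary keys mirror what `NewCentralBase` in `pycentral/base.py` expects later in this diff; the credential values are placeholders, not real ones:

```python
from pycentral.base import NewCentralBase

# Placeholder credentials and base URL; substitute the values obtained above.
token_info = {
    "new_central": {
        "client_id": "<client-id>",
        "client_secret": "<client-secret>",
        "base_url": "<api-gateway-base-url>",
    }
}

# An access token is generated automatically when one is not supplied.
central_conn = NewCentralBase(token_info=token_info)
```

The same structure can also be stored in a YAML or JSON file and passed to `NewCentralBase` as a file path.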

#### 2. **HPE GreenLake (GLP) Authentication**
If you are working with HPE GreenLake APIs, authentication is slightly different:
@@ -118,5 +118,5 @@ The Classic Central functionality is still fully supported by the SDK and has be
- <a href="https://pycentral.readthedocs.io/en/latest/" target="_blank">Python package documentation</a>

### **Use-Cases and Workflows**
- <a href="https://developer.arubanetworks.com/aruba-central/docs/python-getting-started" target="_blank">HPE Aruba Networking Developer Hub</a>
- <a href="https://github.com/aruba/central-python-workflows" target="_blank">central-python-workflows</a>
- <a href="https://developer.arubanetworks.com/central/docs/python-getting-started" target="_blank">HPE Aruba Networking Developer Hub</a>
- <a href="https://github.com/aruba/central-python-workflows/tree/main" target="_blank">central-python-workflows</a>
57 changes: 48 additions & 9 deletions pycentral/base.py
@@ -13,6 +13,7 @@
console_logger,
save_access_token,
)
from .scopes import Scopes
from .utils.url_utils import NewCentralURLs
from .exceptions import LoginError, ResponseError

@@ -21,9 +22,13 @@


class NewCentralBase:
def __init__(self, token_info, logger=None, log_level="DEBUG"):
def __init__(
self, token_info, logger=None, log_level="DEBUG", enable_scope=False
):
"""
Initialize the NewCentralBase class.
This constructor initializes the NewCentralBase class with token information, logging configuration,
and optional scope management. It validates and processes the provided token information, sets up
logging, and optionally initializes scope-related functionality.

:param token_info: Dictionary containing token information for supported applications - new_central, glp.
Can also be a string path to a YAML or JSON file with token information.
@@ -32,19 +37,27 @@ def __init__(self, token_info, logger=None, log_level="DEBUG"):
:type logger: logging.Logger, optional
:param log_level: Logging level, defaults to "DEBUG".
:type log_level: str, optional
:param enable_scope: Flag to enable scope management. If True, the SDK will automatically fetch data
about existing scopes and associated profiles, simplifying scope and configuration
management. If False, scope-related API calls are disabled, resulting in faster
initialization. Defaults to False.
:type enable_scope: bool, optional
"""
self.token_info = new_parse_input_args(token_info)
self.token_file_path = None
if isinstance(token_info, str):
self.token_file_path = token_info
self.logger = self.set_logger(log_level, logger)
self.scopes = None
for app in self.token_info:
app_token_info = self.token_info[app]
if (
"access_token" not in app_token_info
or app_token_info["access_token"] is None
):
self.create_token(app)
if enable_scope:
self.scopes = Scopes(central_conn=self)

def set_logger(self, log_level, logger=None):
"""
@@ -86,13 +99,17 @@ def create_token(self, app_name):

try:
self.logger.info(f"Attempting to create new token from {app_name}")
token = oauth.fetch_token(token_url=urls.Authentication["OAUTH"], auth=auth)
token = oauth.fetch_token(
token_url=urls.Authentication["OAUTH"], auth=auth
)

if "access_token" in token:
self.logger.info(
f"{app_name} Login Successful.. Obtained Access Token!"
)
self.token_info[app_name]["access_token"] = token["access_token"]
self.token_info[app_name]["access_token"] = token[
"access_token"
]
if self.token_file_path:
save_access_token(
app_name,
@@ -118,10 +135,13 @@ def handle_expired_token(self, app_name):
:param app_name: Name of the application.
:type app_name: str
"""
self.logger.info(f"{app_name} access Token has expired.")
self.logger.info("Handling Token Expiry...")
self.logger.info(
f"{app_name} access Token has expired. Handling Token Expiry..."
)
client_id, client_secret = self._return_client_credentials(app_name)
if any(credential is None for credential in [client_id, client_secret]):
if any(
credential is None for credential in [client_id, client_secret]
):
exit(
f"Please provide client_id and client_secret in {app_name} required to generate an access token"
)
@@ -167,7 +187,9 @@ def command(
limit_reached = False
try:
while not limit_reached:
url = build_url(self.token_info[app_name]["base_url"], api_path)
url = build_url(
self.token_info[app_name]["base_url"], api_path
)

if not headers and not files:
headers = {
@@ -262,7 +284,9 @@ def request_url(
data=data,
)
prepped = s.prepare_request(req)
settings = s.merge_environment_settings(prepped.url, {}, None, True, None)
settings = s.merge_environment_settings(
prepped.url, {}, None, True, None
)
try:
resp = s.send(prepped, **settings)
return resp
@@ -318,6 +342,21 @@ def _return_client_credentials(self, app_name):
client_secret = app_token_info["client_secret"]
return client_id, client_secret

def get_scopes(self):
"""
Sets up the scopes for the current instance by creating a Scopes object.

This method initializes the `scopes` attribute using the `Scopes` class,
passing the current instance (`self`) as the `central_conn` parameter.
If the `scopes` attribute is already initialized, it simply returns the existing object.

Returns:
Scopes: The initialized or existing Scopes object.
"""
if self.scopes is None:
self.scopes = Scopes(central_conn=self)
return self.scopes
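As a quick illustration of the lazy path above, the sketch below assumes a `token_info.yaml` file with valid credentials (the filename is hypothetical):

```python
from pycentral.base import NewCentralBase

# enable_scope defaults to False, so no scope-related API calls are made here.
conn = NewCentralBase(token_info="token_info.yaml")

# The first call builds the Scopes object; later calls return the cached instance.
scopes = conn.get_scopes()
assert scopes is conn.get_scopes()
```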


class BearerAuth(requests.auth.AuthBase):
"""This class uses Bearer Auth method to generate the authorization header
60 changes: 45 additions & 15 deletions pycentral/glp/devices.py
@@ -33,7 +33,9 @@ def get_all_devices(self, conn, select=None):
device_list = []

while True:
resp = self.get_device(conn, limit=limit, offset=offset, select=select)
resp = self.get_device(
conn, limit=limit, offset=offset, select=select
)
if resp["code"] != 200:
conn.logger.error(
f"Error fetching list of devices: {resp['code']} - {resp['msg']}"
@@ -171,7 +173,9 @@ def add_devices(self, conn, network=[], compute=[], storage=[]):

# Check for rate limit handler
if count > INPUT_SIZE:
conn.logger.info("WARNING MORE THAN 5 DEVICES IS AN ALPHA FEATURE!")
conn.logger.info(
"WARNING MORE THAN 5 DEVICES IS AN ALPHA FEATURE!"
)
resp_list.append(self.__add_dev("network", network))
resp_list.append(self.__add_dev("compute", compute))
resp_list.append(self.__add_dev("storage", storage))
@@ -209,7 +213,9 @@ def __add_dev(self, conn, type, inputs):
data = {"network": [], "compute": [], "storage": []}

if len(inputs) > INPUT_SIZE:
split_input, wait_time = rate_limit_check(inputs, INPUT_SIZE, POST_RPM)
split_input, wait_time = rate_limit_check(
inputs, INPUT_SIZE, POST_RPM
)

resp_list = []

@@ -219,7 +225,9 @@
data["storage"] = devices if type == "storage" else []
resp = conn.command("POST", path, "glp", api_data=data)
if resp["code"] != 202:
conn.logger.error(f"Add device request failed for {inputs}!")
conn.logger.error(
f"Add device request failed for {inputs}!"
)
else:
conn.logger.info("Add device request accepted...")
resp_list.append(resp)
@@ -284,7 +292,9 @@ def add_sub(self, conn, devices, sub, serial=False, key=False):

# Split devices list per input size.
if len(devices) > INPUT_SIZE:
split_input, wait_time = rate_limit_check(devices, INPUT_SIZE, PATCH_RPM)
split_input, wait_time = rate_limit_check(
devices, INPUT_SIZE, PATCH_RPM
)
conn.logger.info("WARNING MORE THAN 5 DEVICES IS A BETA FEATURE!")

# Setup variables for iterating commands.
@@ -296,13 +306,17 @@ def add_sub(self, conn, devices, sub, serial=False, key=False):
for inputs in queue:
params = {"id": inputs}

resp = conn.command("PATCH", path, "glp", api_params=params, api_data=body)
resp = conn.command(
"PATCH", path, "glp", api_params=params, api_data=body
)
if resp["code"] == 202:
conn.logger.info("Add sub request accepted...")
id = resp["msg"]["transactionId"]
status = check_progress(conn, id, self, limit=PATCH_RPM)
if status[0]:
conn.logger.info("Sucessfully added subscriptions to devices!")
conn.logger.info(
"Sucessfully added subscriptions to devices!"
)
resp_list.append(status[1])
else:
conn.logger.error("Add subscription failed!")
@@ -350,7 +364,9 @@ def remove_sub(self, conn, devices, serial=False):

# Split devices list per input size.
if len(devices) > INPUT_SIZE:
split_input, wait_time = rate_limit_check(devices, INPUT_SIZE, PATCH_RPM)
split_input, wait_time = rate_limit_check(
devices, INPUT_SIZE, PATCH_RPM
)
conn.logger.info("WARNING MORE THAN 5 DEVICES IS A BETA FEATURE!")

# Setup variables for iterating commands.
@@ -362,13 +378,17 @@ def remove_sub(self, conn, devices, serial=False):
for inputs in queue:
params = {"id": inputs}

resp = conn.command("PATCH", path, "glp", api_params=params, api_data=body)
resp = conn.command(
"PATCH", path, "glp", api_params=params, api_data=body
)
if resp["code"] == 202:
conn.logger.info("Remove sub request accepted...")
id = resp["msg"]["transactionId"]
status = check_progress(conn, id, self, limit=PATCH_RPM)
if status[0]:
conn.logger.info("Sucessfully Removed subscriptions from devices!")
conn.logger.info(
"Sucessfully Removed subscriptions from devices!"
)
resp_list.append(status[1])
else:
conn.logger.error("Remove subscription failed!")
@@ -449,11 +469,15 @@ def assign_devices(
)

if resp["code"] == 202:
conn.logger.info("Assign device(s) to application request accepted...")
conn.logger.info(
"Assign device(s) to application request accepted..."
)
id = resp["msg"]["transactionId"]
status = check_progress(conn, id, self, limit=PATCH_RPM)
if status[0]:
conn.logger.info("Sucessfully assigned device(s) to application!")
conn.logger.info(
"Sucessfully assigned device(s) to application!"
)
return status[1]
else:
conn.logger.error("Assign device(s) to application failed!")
@@ -526,10 +550,16 @@ def unassign_devices(self, conn, devices=None, serial=False):
id = resp["msg"]["transactionId"]
status = check_progress(conn, id, self, limit=PATCH_RPM)
if status[0]:
conn.logger.info("Sucessfully unassigned device(s) from application!")
conn.logger.info(
"Sucessfully unassigned device(s) from application!"
)
return status[1]
else:
conn.logger.error("Unassign device(s) from application failed!")
conn.logger.error(
"Unassign device(s) from application failed!"
)
return status[1]
conn.logger.error("Bad request for unassign device(s) from application!")
conn.logger.error(
"Bad request for unassign device(s) from application!"
)
return resp
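Taken together, the methods above follow a submit-then-poll pattern: a 202 response carries a `transactionId`, which is then tracked with `check_progress`. A hedged usage sketch follows; the class name `Devices`, its no-argument constructor, and the credentials file are assumptions not confirmed by this diff:

```python
from pycentral.base import NewCentralBase
from pycentral.glp.devices import Devices  # assumed export name for the class above

conn = NewCentralBase(token_info="token_info.yaml")  # placeholder credentials file
devices_api = Devices()  # assumed constructor

# get_all_devices() pages through get_device() internally until no items remain.
all_devices = devices_api.get_all_devices(conn)
print(all_devices)
```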
13 changes: 7 additions & 6 deletions pycentral/glp/service_manager.py
@@ -31,8 +31,8 @@ def get_application_id_and_region(self, conn, application_name, region):
)
return None

region_service_manager_mapping = self._generate_application_region_mapping(
resp["msg"]["items"]
region_service_manager_mapping = (
self._generate_application_region_mapping(resp["msg"]["items"])
)
if region not in region_service_manager_mapping.keys():
conn.logger.error(
@@ -49,9 +49,9 @@ def get_application_id_and_region(self, conn, application_name, region):
f"Unable to find service manager with name {application_name}. \nValid service managers(applications) in region {region} are {', '.join(region_service_managers)}"
)
return None
service_manager_id = region_service_manager_mapping[region]["serviceManagers"][
application_name
]
service_manager_id = region_service_manager_mapping[region][
"serviceManagers"
][application_name]

resp = self.get_service_manager_provisions(conn)
if resp["code"] != 200:
@@ -61,7 +61,8 @@ def get_application_id_and_region(self, conn, application_name, region):
return None
for provisioned_service in resp["msg"]["items"]:
if (
service_manager_id == provisioned_service["serviceManager"]["id"]
service_manager_id
== provisioned_service["serviceManager"]["id"]
and provisioned_service["region"] == api_region_name
):
conn.logger.info(
8 changes: 6 additions & 2 deletions pycentral/glp/subscriptions.py
@@ -196,11 +196,15 @@ def add_subscription(self, conn, subscriptions=None, limit=0, offset=0):
)

if resp["code"] == 202:
conn.logger.info("Add subscription(s) to workspace request accepted...")
conn.logger.info(
"Add subscription(s) to workspace request accepted..."
)
id = resp["msg"]["transactionId"]
status = check_progress(conn, id, self, limit=SUB_LIMIT)
if status[0]:
conn.logger.info("Sucessfully added subscription(s) to workspace!")
conn.logger.info(
"Sucessfully added subscription(s) to workspace!"
)
return status[1]
else:
conn.logger.error("Add subscription(s) to workspace failed!")
9 changes: 9 additions & 0 deletions pycentral/profiles/__init__.py
@@ -0,0 +1,9 @@
# (C) Copyright 2025 Hewlett Packard Enterprise Development LP.
# MIT License

from .profiles import Profiles
from .policy import Policy
from .role import Role
from .system_info import SystemInfo
from .vlan import Vlan
from .wlan import Wlan
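Because the new package re-exports these classes at the package level, they can be imported directly from `pycentral.profiles`. A minimal sketch (constructor signatures are not shown in this diff, so no instances are created):

```python
# All six profile classes are exposed by the package __init__ above.
from pycentral.profiles import Policy, Profiles, Role, SystemInfo, Vlan, Wlan

print([cls.__name__ for cls in (Profiles, Policy, Role, SystemInfo, Vlan, Wlan)])
```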