mirror of https://github.com/GNS3/gns3-server synced 2024-11-13 20:08:55 +00:00

Copied fresh cloud files from gns gui repo

Jerry Seutter 2014-10-15 15:50:24 -06:00
parent f287f5141a
commit a833925497
2 changed files with 219 additions and 23 deletions

View File

@@ -22,13 +22,24 @@ Base class for interacting with Cloud APIs to create and manage cloud
instances.
"""

from collections import namedtuple
import hashlib
import os
import logging
from io import StringIO, BytesIO

from libcloud.compute.base import NodeAuthSSHKey
from libcloud.storage.types import ContainerAlreadyExistsError, ContainerDoesNotExistError

from .exceptions import ItemNotFound, KeyPairExists, MethodNotAllowed
from .exceptions import OverLimit, BadRequest, ServiceUnavailable
from .exceptions import Unauthorized, ApiError

KeyPair = namedtuple("KeyPair", ['name'], verbose=False)

log = logging.getLogger(__name__)


def parse_exception(exception):
    """
    Parse the exception to separate the HTTP status code from the text.
@@ -67,6 +78,8 @@ class BaseCloudCtrl(object):
        503: ServiceUnavailable
    }

    GNS3_CONTAINER_NAME = 'GNS3'

    def __init__(self, username, api_key):
        self.username = username
        self.api_key = api_key
@@ -89,23 +102,37 @@ class BaseCloudCtrl(object):

        return self.driver.list_sizes()

    def create_instance(self, name, size, image, keypair):
    def list_flavors(self):
        """ Return an iterable of flavors """

        raise NotImplementedError

    def create_instance(self, name, size_id, image_id, keypair):
        """
        Create a new instance with the supplied attributes.

        Return a Node object.
        """
        auth_key = NodeAuthSSHKey(keypair.public_key)

        try:
            return self.driver.create_node(
                name=name,
                size=size,
                image=image,
                auth=auth_key
            )
            image = self.get_image(image_id)
            if image is None:
                raise ItemNotFound("Image not found")

            size = self.driver.ex_get_size(size_id)

            args = {
                "name": name,
                "size": size,
                "image": image,
            }

            if keypair is not None:
                auth_key = NodeAuthSSHKey(keypair.public_key)
                args["auth"] = auth_key
                args["ex_keyname"] = name

            return self.driver.create_node(**args)
        except Exception as e:
            status, error_text = parse_exception(e)
@@ -113,7 +140,8 @@ class BaseCloudCtrl(object):

            if status:
                self._handle_exception(status, error_text)
            else:
                raise e
                log.error("create_instance method raised an exception: {}".format(e))
                log.error('image id {}'.format(image))
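
# Example (sketch): how the reworked create_instance() is expected to be called. `ctrl` is
# assumed to be an authenticated controller subclass (such as the RackspaceCtrl in the second
# file of this commit) with a region already set; the flavor id and image id are placeholders.
keypair = ctrl.create_key_pair("gns3-keypair")
node = ctrl.create_instance("gns3-node", "performance1-1", "<image-uuid>", keypair)
# Passing keypair=None skips the SSH key injection branch above.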

    def delete_instance(self, instance):
        """ Delete the specified instance. Returns True or False. """
@@ -142,7 +170,11 @@ class BaseCloudCtrl(object):

    def list_instances(self):
        """ Return a list of instances in the current region. """

        return self.driver.list_nodes()
        try:
            return self.driver.list_nodes()
        except Exception as e:
            log.error("list_instances returned an error: {}".format(e))

    def create_key_pair(self, name):
        """ Create and return a new Key Pair. """
@@ -173,7 +205,85 @@ class BaseCloudCtrl(object):
            else:
                raise e

    def delete_key_pair_by_name(self, keypair_name):
        """ Utility method to encapsulate boilerplate code """

        kp = KeyPair(name=keypair_name)
        return self.delete_key_pair(kp)

    def list_key_pairs(self):
        """ Return a list of Key Pairs. """

        return self.driver.list_key_pairs()

    def upload_file(self, file_path, folder):
        """
        Uploads file to cloud storage (if it is not identical to a file already in cloud storage).

        :param file_path: path to file to upload
        :param folder: folder in cloud storage to save file in
        :return: True if file was uploaded, False if it was skipped because it already existed and was identical
        """
        try:
            gns3_container = self.storage_driver.create_container(self.GNS3_CONTAINER_NAME)
        except ContainerAlreadyExistsError:
            gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)

        with open(file_path, 'rb') as file:
            local_file_hash = hashlib.md5(file.read()).hexdigest()

            cloud_object_name = folder + '/' + os.path.basename(file_path)
            cloud_hash_name = cloud_object_name + '.md5'
            cloud_objects = [obj.name for obj in gns3_container.list_objects()]

            # if the file and its hash are in object storage, and the local and storage file hashes match
            # do not upload the file, otherwise upload it
            if cloud_object_name in cloud_objects and cloud_hash_name in cloud_objects:
                hash_object = gns3_container.get_object(cloud_hash_name)
                cloud_object_hash = ''
                for chunk in hash_object.as_stream():
                    cloud_object_hash += chunk.decode('utf8')

                if cloud_object_hash == local_file_hash:
                    return False

            file.seek(0)
            self.storage_driver.upload_object_via_stream(file, gns3_container, cloud_object_name)
            self.storage_driver.upload_object_via_stream(StringIO(local_file_hash), gns3_container, cloud_hash_name)

        return True
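
# Example (sketch): pushing a local project archive into the GNS3 container. The controller
# `ctrl` and the local path are assumptions for illustration, not values from this commit.
uploaded = ctrl.upload_file("/tmp/my_project.zip", "projects")
if not uploaded:
    print("identical copy already in cloud storage, upload skipped")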

    def list_projects(self):
        """
        Lists projects in cloud storage

        :return: List of (project name, object name in storage)
        """
        try:
            gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
            projects = [
                (obj.name.replace('projects/', '').replace('.zip', ''), obj.name)
                for obj in gns3_container.list_objects()
                if obj.name.startswith('projects/') and obj.name[-4:] == '.zip'
            ]
            return projects
        except ContainerDoesNotExistError:
            return []
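
# Example (sketch): list_projects() pairs the bare project name with the full object name,
# so an object stored as "projects/lab1.zip" comes back as ("lab1", "projects/lab1.zip").
# `ctrl` is an assumed, already-configured controller.
for project_name, object_name in ctrl.list_projects():
    print(project_name, object_name)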

    def download_file(self, file_name, destination=None):
        """
        Downloads file from cloud storage

        :param file_name: name of file in cloud storage to download
        :param destination: local path to save file to (if None, returns file contents as a file-like object)
        :return: A file-like object if file contents are returned, or None if file is saved to filesystem
        """
        gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
        storage_object = gns3_container.get_object(file_name)
        if destination is not None:
            storage_object.download(destination)
        else:
            contents = b''
            for chunk in storage_object.as_stream():
                contents += chunk
            return BytesIO(contents)
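
# Example (sketch): the two ways download_file() can be used; the object and path names are
# placeholders and `ctrl` is an assumed, already-configured controller.
ctrl.download_file("projects/lab1.zip", "/tmp/lab1.zip")   # written to disk, returns None
data = ctrl.download_file("projects/lab1.zip")             # no destination: returns a BytesIO object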

View File

@@ -23,31 +23,37 @@ import requests
from libcloud.compute.drivers.rackspace import ENDPOINT_ARGS_MAP
from libcloud.compute.providers import get_driver
from libcloud.compute.types import Provider
from libcloud.storage.providers import get_driver as get_storage_driver
from libcloud.storage.types import Provider as StorageProvider
from .exceptions import ItemNotFound, ApiError
from ..version import __version__
from collections import OrderedDict
import logging
log = logging.getLogger(__name__)
RACKSPACE_REGIONS = [{ENDPOINT_ARGS_MAP[k]['region']: k} for k in
                     ENDPOINT_ARGS_MAP]
GNS3IAS_URL = 'http://localhost:8888' # TODO find a place for this value
class RackspaceCtrl(BaseCloudCtrl):

    """ Controller class for interacting with Rackspace API. """

    def __init__(self, username, api_key):
    def __init__(self, username, api_key, gns3_ias_url):
        super(RackspaceCtrl, self).__init__(username, api_key)
        self.gns3_ias_url = gns3_ias_url

        # set this up so it can be swapped out with a mock for testing
        self.post_fn = requests.post
        self.driver_cls = get_driver(Provider.RACKSPACE)
        self.storage_driver_cls = get_storage_driver(StorageProvider.CLOUDFILES)

        self.driver = None
        self.storage_driver = None
        self.region = None
        self.instances = {}
@@ -57,6 +63,26 @@ class RackspaceCtrl(BaseCloudCtrl):
        self.regions = []
        self.token = None
        self.tenant_id = None
        self.flavor_ep = "https://dfw.servers.api.rackspacecloud.com/v2/{username}/flavors"
        self._flavors = OrderedDict([
            ('2', '512MB, 1 VCPU'),
            ('3', '1GB, 1 VCPU'),
            ('4', '2GB, 2 VCPUs'),
            ('5', '4GB, 2 VCPUs'),
            ('6', '8GB, 4 VCPUs'),
            ('7', '15GB, 6 VCPUs'),
            ('8', '30GB, 8 VCPUs'),
            ('performance1-1', '1GB Performance, 1 VCPU'),
            ('performance1-2', '2GB Performance, 2 VCPUs'),
            ('performance1-4', '4GB Performance, 4 VCPUs'),
            ('performance1-8', '8GB Performance, 8 VCPUs'),
            ('performance2-15', '15GB Performance, 4 VCPUs'),
            ('performance2-30', '30GB Performance, 8 VCPUs'),
            ('performance2-60', '60GB Performance, 16 VCPUs'),
            ('performance2-90', '90GB Performance, 24 VCPUs'),
            ('performance2-120', '120GB Performance, 32 VCPUs',)
        ])

    def authenticate(self):
        """
@@ -100,6 +126,7 @@ class RackspaceCtrl(BaseCloudCtrl):
                self.authenticated = True
                user_regions = self._parse_endpoints(api_data)
                self.regions = self._make_region_list(user_regions)
                self.tenant_id = self._parse_tenant_id(api_data)
        else:
            self.regions = []
@@ -114,6 +141,11 @@ class RackspaceCtrl(BaseCloudCtrl):

        return self.regions

    def list_flavors(self):
        """ Return the dictionary containing flavors id and names """

        return self._flavors
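
# Example (sketch): list_flavors() returns the OrderedDict built in __init__ above, so a caller
# can show the human-readable label while keeping the id that create_instance() expects.
# `ctrl` is an assumed RackspaceCtrl instance.
for flavor_id, label in ctrl.list_flavors().items():
    print(flavor_id, label)    # e.g. "performance1-1", "1GB Performance, 1 VCPU"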

    def _parse_endpoints(self, api_data):
        """
        Parse the JSON-encoded data returned by the Identity Service API.
@@ -144,6 +176,17 @@ class RackspaceCtrl(BaseCloudCtrl):

        return token

    def _parse_tenant_id(self, api_data):
        """ """
        try:
            roles = api_data['access']['user']['roles']
            for role in roles:
                if 'tenantId' in role and role['name'] == 'compute:default':
                    return role['tenantId']
            return None
        except KeyError:
            return None
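
# Example (sketch): the fragment of the Identity API response that _parse_tenant_id() walks.
# Only the structure is taken from the code above; the tenant id value is a placeholder.
api_data = {
    "access": {
        "user": {
            "roles": [
                {"name": "compute:default", "tenantId": "123456"},
            ]
        }
    }
}
# ctrl._parse_tenant_id(api_data) would return "123456"; it returns None when no
# "compute:default" role is present or the expected keys are missing.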

    def _make_region_list(self, region_codes):
        """
        Make a list of regions for use in the GUI.
@@ -173,6 +216,8 @@ class RackspaceCtrl(BaseCloudCtrl):

        try:
            self.driver = self.driver_cls(self.username, self.api_key,
                                          region=region)
            self.storage_driver = self.storage_driver_cls(self.username, self.api_key,
                                                          region=region)

        except ValueError:
            return False
@@ -189,14 +234,19 @@ class RackspaceCtrl(BaseCloudCtrl):
        or, if access was already asked

        [{"image_id": "", "member_id": "", "status": "ALREADYREQUESTED"},]
        """
        endpoint = GNS3IAS_URL+"/images/grant_access"
        endpoint = self.gns3_ias_url+"/images/grant_access"
        params = {
            "user_id": username,
            "user_region": region,
            "user_region": region.upper(),
            "gns3_version": gns3_version,
        }
        response = requests.get(endpoint, params=params)
        try:
            response = requests.get(endpoint, params=params)
        except requests.ConnectionError:
            raise ApiError("Unable to connect to IAS")

        status = response.status_code

        if status == 200:
            return response.json()
        elif status == 404:
@@ -209,17 +259,53 @@ class RackspaceCtrl(BaseCloudCtrl):
        Return a dictionary containing RackSpace server images
        retrieved from gns3-ias server
        """
        if not (self.username and self.region):
            return []
        if not (self.tenant_id and self.region):
            return {}

        try:
            response = self._get_shared_images(self.username, self.region, __version__)
            shared_images = json.loads(response)
            shared_images = self._get_shared_images(self.tenant_id, self.region, __version__)
            images = {}
            for i in shared_images:
                images[i['image_id']] = i['image_name']
            return images
        except ItemNotFound:
            return []
            return {}
        except ApiError as e:
            log.error('Error while retrieving image list: %s' % e)
            return {}

    def get_image(self, image_id):
        return self.driver.get_image(image_id)
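
# Example (sketch): list_images() now returns a dict keyed on image id, so a GUI can display
# the name while passing the id on to get_image()/create_instance(). `ctrl` is an assumed,
# authenticated RackspaceCtrl with tenant_id and region set; the values are whatever the
# gns3-ias server shares with that account.
for image_id, image_name in ctrl.list_images().items():
    print(image_id, image_name)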

def get_provider(cloud_settings):
    """
    Utility function to retrieve a cloud provider instance already authenticated and with the
    region set

    :param cloud_settings: cloud settings dictionary
    :return: a provider instance or None on errors
    """
    try:
        username = cloud_settings['cloud_user_name']
        apikey = cloud_settings['cloud_api_key']
        region = cloud_settings['cloud_region']
        ias_url = cloud_settings['gns3_ias_url']
    except KeyError as e:
        log.error("Unable to create cloud provider: {}".format(e))
        return

    provider = RackspaceCtrl(username, apikey, ias_url)
    if not provider.authenticate():
        log.error("Authentication failed for cloud provider")
        return

    if not region:
        region = provider.list_regions().values()[0]

    if not provider.set_region(region):
        log.error("Unable to set cloud provider region")
        return

    return provider
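
# Example (sketch): driving get_provider() from a settings dictionary. The keys are the ones
# read by the function above; the values are placeholders.
cloud_settings = {
    "cloud_user_name": "<username>",
    "cloud_api_key": "<api key>",
    "cloud_region": "dfw",
    "gns3_ias_url": "http://localhost:8888",
}
provider = get_provider(cloud_settings)
if provider is not None:
    print(provider.list_flavors())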