
Merge pull request #7 from planctechnologies/gns-110

Support launching devices from cloud file images
Nasrullah Taha 2014-10-30 14:23:43 -06:00
commit bf0b6ee534
9 changed files with 270 additions and 41 deletions
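
At a high level, this change lets a Dynamips VM request reference a router image stored in cloud object storage: when the request carries a cloud_path and the image is not already in the local images directory, the server builds a cloud provider from the configured cloud settings and downloads the image before the router is created. A rough sketch of that flow, using the helpers added in this commit (the settings values, folder, and file names below are placeholders, not part of the change):

# Illustrative sketch only -- mirrors the flow added in this commit.
from gns3dms.cloud.rackspace_ctrl import get_provider

settings = {                                   # normally read from cloud.conf
    "cloud_user_name": "myuser",
    "cloud_api_key": "0123456789abcdef",
    "cloud_region": "iad",
    "gns3_ias_url": "http://localhost:8888",
}

provider = get_provider(settings)              # authenticates and selects the region
if provider is not None:
    # "images/dynamips" and the image file name are hypothetical examples
    provider.download_file("images/dynamips/c7200.image",
                           "/opt/gns3/images/c7200.image")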


@ -22,13 +22,24 @@ Base class for interacting with Cloud APIs to create and manage cloud
instances.
"""
from collections import namedtuple
import hashlib
import os
import logging
from io import StringIO, BytesIO
from libcloud.compute.base import NodeAuthSSHKey
from libcloud.storage.types import ContainerAlreadyExistsError, ContainerDoesNotExistError
from .exceptions import ItemNotFound, KeyPairExists, MethodNotAllowed
from .exceptions import OverLimit, BadRequest, ServiceUnavailable
from .exceptions import Unauthorized, ApiError
KeyPair = namedtuple("KeyPair", ['name'], verbose=False)
log = logging.getLogger(__name__)
def parse_exception(exception):
"""
Parse the exception to separate the HTTP status code from the text.
@ -67,6 +78,8 @@ class BaseCloudCtrl(object):
503: ServiceUnavailable
}
GNS3_CONTAINER_NAME = 'GNS3'
def __init__(self, username, api_key):
self.username = username
self.api_key = api_key
@ -89,23 +102,37 @@ class BaseCloudCtrl(object):
return self.driver.list_sizes()
def create_instance(self, name, size, image, keypair):
def list_flavors(self):
""" Return an iterable of flavors """
raise NotImplementedError
def create_instance(self, name, size_id, image_id, keypair):
"""
Create a new instance with the supplied attributes.
Return a Node object.
"""
auth_key = NodeAuthSSHKey(keypair.public_key)
try:
return self.driver.create_node(
name=name,
size=size,
image=image,
auth=auth_key
)
image = self.get_image(image_id)
if image is None:
raise ItemNotFound("Image not found")
size = self.driver.ex_get_size(size_id)
args = {
"name": name,
"size": size,
"image": image,
}
if keypair is not None:
auth_key = NodeAuthSSHKey(keypair.public_key)
args["auth"] = auth_key
args["ex_keyname"] = name
return self.driver.create_node(**args)
except Exception as e:
status, error_text = parse_exception(e)
@ -113,7 +140,8 @@ class BaseCloudCtrl(object):
if status:
self._handle_exception(status, error_text)
else:
raise e
log.error("create_instance method raised an exception: {}".format(e))
log.error('image id {}'.format(image))
def delete_instance(self, instance):
""" Delete the specified instance. Returns True or False. """
@ -142,7 +170,11 @@ class BaseCloudCtrl(object):
def list_instances(self):
""" Return a list of instances in the current region. """
return self.driver.list_nodes()
try:
return self.driver.list_nodes()
except Exception as e:
log.error("list_instances returned an error: {}".format(e))
def create_key_pair(self, name):
""" Create and return a new Key Pair. """
@ -173,7 +205,85 @@ class BaseCloudCtrl(object):
else:
raise e
def delete_key_pair_by_name(self, keypair_name):
""" Utility method to incapsulate boilerplate code """
kp = KeyPair(name=keypair_name)
return self.delete_key_pair(kp)
def list_key_pairs(self):
""" Return a list of Key Pairs. """
return self.driver.list_key_pairs()
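
The key-pair helpers compose in the obvious way; a small assumed example (the controller instance and names are placeholders):

# Hypothetical example -- `ctrl` is an authenticated controller.
kp = ctrl.create_key_pair("gns3-worker")        # returns the new key pair (with public_key)
node = ctrl.create_instance("gns3-worker", "performance1-4", "<image-uuid>", kp)
ctrl.delete_key_pair_by_name("gns3-worker")     # wraps delete_key_pair(KeyPair(name=...))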
def upload_file(self, file_path, folder):
"""
Uploads a file to cloud storage (unless an identical copy is already stored there).
:param file_path: path to file to upload
:param folder: folder in cloud storage to save file in
:return: True if file was uploaded, False if it was skipped because it already existed and was identical
"""
try:
gns3_container = self.storage_driver.create_container(self.GNS3_CONTAINER_NAME)
except ContainerAlreadyExistsError:
gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
with open(file_path, 'rb') as file:
local_file_hash = hashlib.md5(file.read()).hexdigest()
cloud_object_name = folder + '/' + os.path.basename(file_path)
cloud_hash_name = cloud_object_name + '.md5'
cloud_objects = [obj.name for obj in gns3_container.list_objects()]
# if the file and its hash are in object storage, and the local and storage file hashes match
# do not upload the file, otherwise upload it
if cloud_object_name in cloud_objects and cloud_hash_name in cloud_objects:
hash_object = gns3_container.get_object(cloud_hash_name)
cloud_object_hash = ''
for chunk in hash_object.as_stream():
cloud_object_hash += chunk.decode('utf8')
if cloud_object_hash == local_file_hash:
return False
file.seek(0)
self.storage_driver.upload_object_via_stream(file, gns3_container, cloud_object_name)
self.storage_driver.upload_object_via_stream(StringIO(local_file_hash), gns3_container, cloud_hash_name)
return True
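
A brief usage sketch of the dedup behaviour above (the controller instance and paths are assumed): upload_file() stores the object plus a sibling .md5 object, so a repeat call with an unchanged file compares hashes and skips the transfer.

# Hypothetical example -- `ctrl` is an authenticated controller with storage configured.
ctrl.upload_file("/opt/gns3/images/c7200.image", "images")   # -> True, object uploaded
ctrl.upload_file("/opt/gns3/images/c7200.image", "images")   # -> False, MD5 matches, skipped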
def list_projects(self):
"""
Lists projects in cloud storage
:return: List of (project name, object name in storage)
"""
try:
gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
projects = [
(obj.name.replace('projects/', '').replace('.zip', ''), obj.name)
for obj in gns3_container.list_objects()
if obj.name.startswith('projects/') and obj.name[-4:] == '.zip'
]
return projects
except ContainerDoesNotExistError:
return []
def download_file(self, file_name, destination=None):
"""
Downloads a file from cloud storage
:param file_name: name of file in cloud storage to download
:param destination: local path to save file to (if None, returns file contents as a file-like object)
:return: A file-like object if file contents are returned, or None if file is saved to filesystem
"""
gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
storage_object = gns3_container.get_object(file_name)
if destination is not None:
storage_object.download(destination)
else:
contents = b''
for chunk in storage_object.as_stream():
contents += chunk
return BytesIO(contents)
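
And a matching sketch for the read side, combining list_projects() with download_file() (the controller instance and local paths are assumed):

# Hypothetical example -- `ctrl` is an authenticated controller with a region set.
for project_name, object_name in ctrl.list_projects():
    # object_name is of the form "projects/<name>.zip" inside the GNS3 container
    ctrl.download_file(object_name, "/tmp/{}.zip".format(project_name))

data = ctrl.download_file("projects/lab1.zip")   # no destination: returns a BytesIO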


@ -23,31 +23,37 @@ import requests
from libcloud.compute.drivers.rackspace import ENDPOINT_ARGS_MAP
from libcloud.compute.providers import get_driver
from libcloud.compute.types import Provider
from libcloud.storage.providers import get_driver as get_storage_driver
from libcloud.storage.types import Provider as StorageProvider
from .exceptions import ItemNotFound, ApiError
from ..version import __version__
from collections import OrderedDict
import logging
log = logging.getLogger(__name__)
RACKSPACE_REGIONS = [{ENDPOINT_ARGS_MAP[k]['region']: k} for k in
ENDPOINT_ARGS_MAP]
GNS3IAS_URL = 'http://localhost:8888' # TODO find a place for this value
class RackspaceCtrl(BaseCloudCtrl):
""" Controller class for interacting with Rackspace API. """
def __init__(self, username, api_key):
def __init__(self, username, api_key, gns3_ias_url):
super(RackspaceCtrl, self).__init__(username, api_key)
self.gns3_ias_url = gns3_ias_url
# set this up so it can be swapped out with a mock for testing
self.post_fn = requests.post
self.driver_cls = get_driver(Provider.RACKSPACE)
self.storage_driver_cls = get_storage_driver(StorageProvider.CLOUDFILES)
self.driver = None
self.storage_driver = None
self.region = None
self.instances = {}
@ -57,6 +63,26 @@ class RackspaceCtrl(BaseCloudCtrl):
self.regions = []
self.token = None
self.tenant_id = None
self.flavor_ep = "https://dfw.servers.api.rackspacecloud.com/v2/{username}/flavors"
self._flavors = OrderedDict([
('2', '512MB, 1 VCPU'),
('3', '1GB, 1 VCPU'),
('4', '2GB, 2 VCPUs'),
('5', '4GB, 2 VCPUs'),
('6', '8GB, 4 VCPUs'),
('7', '15GB, 6 VCPUs'),
('8', '30GB, 8 VCPUs'),
('performance1-1', '1GB Performance, 1 VCPU'),
('performance1-2', '2GB Performance, 2 VCPUs'),
('performance1-4', '4GB Performance, 4 VCPUs'),
('performance1-8', '8GB Performance, 8 VCPUs'),
('performance2-15', '15GB Performance, 4 VCPUs'),
('performance2-30', '30GB Performance, 8 VCPUs'),
('performance2-60', '60GB Performance, 16 VCPUs'),
('performance2-90', '90GB Performance, 24 VCPUs'),
('performance2-120', '120GB Performance, 32 VCPUs',)
])
def authenticate(self):
"""
@ -100,6 +126,7 @@ class RackspaceCtrl(BaseCloudCtrl):
self.authenticated = True
user_regions = self._parse_endpoints(api_data)
self.regions = self._make_region_list(user_regions)
self.tenant_id = self._parse_tenant_id(api_data)
else:
self.regions = []
@ -114,6 +141,11 @@ class RackspaceCtrl(BaseCloudCtrl):
return self.regions
def list_flavors(self):
""" Return the dictionary containing flavors id and names """
return self._flavors
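
Since list_flavors() returns the id-to-label mapping defined above, a flavor id can be fed straight into create_instance() as size_id; a hedged sketch (the image id and instance name are placeholders):

# Hypothetical example -- ids below are placeholders.
for flavor_id, label in ctrl.list_flavors().items():
    print(flavor_id, label)                      # e.g. "performance1-4  4GB Performance, 4 VCPUs"

node = ctrl.create_instance("gns3-worker", "performance1-4", "<image-uuid>", keypair=None)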
def _parse_endpoints(self, api_data):
"""
Parse the JSON-encoded data returned by the Identity Service API.
@ -144,6 +176,17 @@ class RackspaceCtrl(BaseCloudCtrl):
return token
def _parse_tenant_id(self, api_data):
""" """
try:
roles = api_data['access']['user']['roles']
for role in roles:
if 'tenantId' in role and role['name'] == 'compute:default':
return role['tenantId']
return None
except KeyError:
return None
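
The slice of the Identity API response that _parse_tenant_id() walks has roughly this shape (trimmed, illustrative values):

# Trimmed, illustrative shape only.
api_data = {
    "access": {
        "user": {
            "roles": [
                {"name": "object-store:default", "tenantId": "MossoCloudFS_xxxx"},
                {"name": "compute:default", "tenantId": "123456"},
            ]
        }
    }
}
# _parse_tenant_id(api_data) -> "123456"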
def _make_region_list(self, region_codes):
"""
Make a list of regions for use in the GUI.
@ -173,6 +216,8 @@ class RackspaceCtrl(BaseCloudCtrl):
try:
self.driver = self.driver_cls(self.username, self.api_key,
region=region)
self.storage_driver = self.storage_driver_cls(self.username, self.api_key,
region=region)
except ValueError:
return False
@ -189,14 +234,19 @@ class RackspaceCtrl(BaseCloudCtrl):
or, if access was already requested
[{"image_id": "", "member_id": "", "status": "ALREADYREQUESTED"},]
"""
endpoint = GNS3IAS_URL+"/images/grant_access"
endpoint = self.gns3_ias_url+"/images/grant_access"
params = {
"user_id": username,
"user_region": region,
"user_region": region.upper(),
"gns3_version": gns3_version,
}
response = requests.get(endpoint, params=params)
try:
response = requests.get(endpoint, params=params)
except requests.ConnectionError:
raise ApiError("Unable to connect to IAS")
status = response.status_code
if status == 200:
return response.json()
elif status == 404:
@ -209,17 +259,53 @@ class RackspaceCtrl(BaseCloudCtrl):
Return a dictionary containing Rackspace server images
retrieved from the gns3-ias server
"""
if not (self.username and self.region):
return []
if not (self.tenant_id and self.region):
return {}
try:
response = self._get_shared_images(self.username, self.region, __version__)
shared_images = json.loads(response)
shared_images = self._get_shared_images(self.tenant_id, self.region, __version__)
images = {}
for i in shared_images:
images[i['image_id']] = i['image_name']
return images
except ItemNotFound:
return []
return {}
except ApiError as e:
log.error('Error while retrieving image list: %s' % e)
return {}
def get_image(self, image_id):
return self.driver.get_image(image_id)
def get_provider(cloud_settings):
"""
Utility function to retrieve a cloud provider instance that is already authenticated and has
its region set
:param cloud_settings: cloud settings dictionary
:return: a provider instance or None on errors
"""
try:
username = cloud_settings['cloud_user_name']
apikey = cloud_settings['cloud_api_key']
region = cloud_settings['cloud_region']
ias_url = cloud_settings.get('gns3_ias_url', '')
except KeyError as e:
log.error("Unable to create cloud provider: {}".format(e))
return
provider = RackspaceCtrl(username, apikey, ias_url)
if not provider.authenticate():
log.error("Authentication failed for cloud provider")
return
if not region:
region = provider.list_regions().values()[0]
if not provider.set_region(region):
log.error("Unable to set cloud provider region")
return
return provider


@ -77,7 +77,7 @@ Options:
--cloud_user_name
--instance_id ID of the Rackspace instance to terminate
--region Region of instance
--cloud_region Region of instance
--deadtime How long, in seconds, the communication loss may persist before we
shut down this instance.
@ -205,8 +205,8 @@ def parse_cmd_line(argv):
print(usage)
sys.exit(2)
if cmd_line_option_list["region"] is None:
print("You need to specify a region")
if cmd_line_option_list["cloud_region"] is None:
print("You need to specify a cloud_region")
print(usage)
sys.exit(2)


@ -26,6 +26,8 @@ import configparser
import logging
log = logging.getLogger(__name__)
CLOUD_SERVER = 'CLOUD_SERVER'
class Config(object):
"""
@ -62,20 +64,30 @@ class Config(object):
# 5: server.conf in the current working directory
home = os.path.expanduser("~")
self._cloud_config = os.path.join(home, ".config", appname, "cloud.conf")
self._cloud_file = os.path.join(home, ".config", appname, "cloud.conf")
filename = "server.conf"
self._files = [os.path.join(home, ".config", appname, filename),
os.path.join(home, ".config", appname + ".conf"),
os.path.join("/etc/xdg", appname, filename),
os.path.join("/etc/xdg", appname + ".conf"),
filename,
self._cloud_config]
self._cloud_file]
self._config = configparser.ConfigParser()
self.read_config()
self._cloud_config = configparser.ConfigParser()
self.read_cloud_config()
def list_cloud_config_file(self):
return self._cloud_config
return self._cloud_file
def read_cloud_config(self):
parsed_file = self._cloud_config.read(self._cloud_file)
if not self._cloud_config.has_section(CLOUD_SERVER):
self._cloud_config.add_section(CLOUD_SERVER)
def cloud_settings(self):
return self._cloud_config[CLOUD_SERVER]
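
For orientation, a cloud.conf along these lines would satisfy read_cloud_config() and cloud_settings(); the option names are assumptions pieced together from the rest of this change rather than a documented format:

# ~/.config/<appname>/cloud.conf -- illustrative contents only
[CLOUD_SERVER]
cloud_user_name = myuser
cloud_api_key = 0123456789abcdef
cloud_region = iad
gns3_ias_url = http://localhost:8888
WEB_USERNAME = A1B2C3D4
WEB_PASSWORD = E5F6A7B8
SSL_ENABLED = no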
def read_config(self):
"""


@ -78,11 +78,15 @@ class LoginHandler(tornado.web.RequestHandler):
self.set_secure_cookie("user", "None")
auth_status = "failure"
log.info("Authentication attempt %s: %s" %(auth_status, user))
log.info("Authentication attempt {}: {}, {}".format(auth_status, user, password))
try:
redirect_to = self.get_secure_cookie("login_success_redirect_to")
except tornado.web.MissingArgumentError:
redirect_to = "/"
self.redirect(redirect_to)
if redirect_to is None:
self.write({'result': auth_status})
else:
log.info('Redirecting to {}'.format(redirect_to))
self.redirect(redirect_to)


@ -61,6 +61,7 @@ class IModule(multiprocessing.Process):
self._current_destination = None
self._current_call_id = None
self._stopping = False
self._cloud_settings = config.cloud_settings()
def _setup(self):
"""
@ -177,7 +178,6 @@ class IModule(multiprocessing.Process):
# add session to the response
response = [self._current_session, jsonrpc_response]
log.debug("ZeroMQ client ({}) sending: {}".format(self.name, response))
self._stream.send_json(response)
def send_param_error(self):


@ -19,6 +19,7 @@ import os
import base64
import time
from gns3server.modules import IModule
from gns3dms.cloud.rackspace_ctrl import get_provider
from ..dynamips_error import DynamipsError
from ..nodes.c1700 import C1700
@ -140,12 +141,22 @@ class VM(object):
chassis = request.get("chassis")
router_id = request.get("router_id")
# Locate the image
updated_image_path = os.path.join(self.images_directory, image)
if os.path.isfile(updated_image_path):
image = updated_image_path
else:
if not os.path.exists(self.images_directory):
os.mkdir(self.images_directory)
if request.get("cloud_path", None):
# Download the image from cloud files
cloud_path = request.get("cloud_path")
full_cloud_path = "/".join((cloud_path, image))
provider = get_provider(self._cloud_settings)
provider.download_file(full_cloud_path, updated_image_path)
try:
if platform not in PLATFORMS:
raise DynamipsError("Unknown router platform: {}".format(platform))


@ -67,6 +67,10 @@ VM_CREATE_SCHEMA = {
"type": "string",
"minLength": 1,
"pattern": "^([0-9a-fA-F]{4}\\.){2}[0-9a-fA-F]{4}$"
},
"cloud_path": {
"description": "Path to the image in the cloud object store",
"type": "string",
}
},
"additionalProperties": False,


@ -61,6 +61,7 @@ USAGE: %s
Options:
-d, --debug Enable debugging
-i --ip The ip address of the server, for cert generation
-v, --verbose Enable verbose logging
-h, --help Display this menu :)
@ -79,6 +80,7 @@ def parse_cmd_line(argv):
short_args = "dvh"
long_args = ("debug",
"ip=",
"verbose",
"help",
"data=",
@ -105,6 +107,8 @@ def parse_cmd_line(argv):
sys.exit(0)
elif opt in ("-d", "--debug"):
cmd_line_option_list["debug"] = True
elif opt in ("--ip",):
cmd_line_option_list["ip"] = val
elif opt in ("-v", "--verbose"):
cmd_line_option_list["verbose"] = True
elif opt in ("--data",):
@ -151,7 +155,7 @@ def set_logging(cmd_options):
return log
def _generate_certs():
def _generate_certs(options):
"""
Generate a self-signed certificate for SSL-enabling the WebSocket
connection. The certificate is sent back to the client so it can
@ -159,7 +163,7 @@ def _generate_certs():
:return: A 2-tuple of strings containing (server_key, server_cert)
"""
cmd = ["{}/cert_utils/create_cert.sh".format(SCRIPT_PATH)]
cmd = ["{}/cert_utils/create_cert.sh".format(SCRIPT_PATH), options['ip']]
log.debug("Generating certs with cmd: {}".format(' '.join(cmd)))
output_raw = subprocess.check_output(cmd, shell=False,
stderr=subprocess.STDOUT)
@ -176,9 +180,9 @@ def _start_gns3server():
:return: None
"""
cmd = ['gns3server', '--quiet']
cmd = 'gns3server --quiet > /tmp/gns3.log 2>&1 &'
log.info("Starting gns3server with cmd {}".format(cmd))
subprocess.Popen(cmd, shell=False)
os.system(cmd)
def main():
@ -211,7 +215,7 @@ def main():
except FileExistsError:
pass
(server_key, server_crt) = _generate_certs()
(server_key, server_crt) = _generate_certs(options)
cloud_config = configparser.ConfigParser()
cloud_config['CLOUD_SERVER'] = {}
@ -221,15 +225,13 @@ def main():
cloud_config['CLOUD_SERVER']['SSL_KEY'] = server_key
cloud_config['CLOUD_SERVER']['SSL_CRT'] = server_crt
cloud_config['CLOUD_SERVER']['SSL_ENABLED'] = 'yes'
cloud_config['CLOUD_SERVER']['SSL_ENABLED'] = 'no'
cloud_config['CLOUD_SERVER']['WEB_USERNAME'] = str(uuid.uuid4()).upper()[0:8]
cloud_config['CLOUD_SERVER']['WEB_PASSWORD'] = str(uuid.uuid4()).upper()[0:8]
with open(cfg, 'w') as cloud_config_file:
cloud_config.write(cloud_config_file)
cloud_config_file.close()
_start_gns3server()
with open(server_crt, 'r') as cert_file: