# vi: ts=4 expandtab
#
#    Copyright (C) 2009-2010 Canonical Ltd.
#
#    Author: Scott Moser <scott.moser@canonical.com>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License version 3, as
#    published by the Free Software Foundation.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.


import logging

import UserDataHandler as ud

DEP_FILESYSTEM = "FILESYSTEM"
DEP_NETWORK = "NETWORK"

log = None

def setlog(log_in=None, name="DataSource"):
    # set the module-level logger; if none is given, fall back to a logger
    # with a NullHandler so that logging calls are silently discarded
    global log
    log = log_in
    if log is None:
        class NullHandler(logging.Handler):
            def emit(self, record):
                pass
        log = logging.getLogger(name)
        log.addHandler(NullHandler())

class DataSource:
    userdata = None
    metadata = None
    userdata_raw = None

    def __init__(self):
        pass

    def get_userdata(self):
        if self.userdata is None:
            self.userdata = ud.preprocess_userdata(self.userdata_raw)
        return self.userdata

    def get_userdata_raw(self):
        return(self.userdata_raw)


    # the data source's config_obj is a cloud-config formatted object that
    # arrived by some means other than cloud-config user-data, because
    # cloud-config content is handled elsewhere
    def get_config_obj(self):
        return({})

    def get_public_ssh_keys(self):
        keys = []
        if 'public-keys' not in self.metadata:
            return([])
        for keyname, klist in self.metadata['public-keys'].items():
            # lp:506332 uec metadata service responds with
            # data that makes boto populate a string for 'klist' rather
            # than a list.
            if isinstance(klist, str):
                klist = [klist]
            for pkey in klist:
                # the keylist may end with an empty string; skip empty entries
                if pkey:
                    keys.append(pkey)

        return(keys)
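
    # Example (hypothetical metadata shape, for illustration only; key names
    # and key material below are made up):
    #   self.metadata['public-keys'] = {
    #       'mykey': ['ssh-rsa AAAA... user@host', ''],  # list form
    #       'other': 'ssh-rsa BBBB... user@host',        # string form (lp:506332)
    #   }
    #   get_public_ssh_keys() would then return both 'ssh-rsa ...' entries,
    #   dropping the trailing empty string.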

    def device_name_to_device(self, name):
        # translate a 'name' to a device
        # the primary use at this point is on ec2, where the metadata
        # service maps names to devices, e.g.
        #  ephemeral0: sdb
        # so 'sdb' is returned for the input 'ephemeral0'
        return(None)
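
    # Example (illustrative sketch of an override in an EC2-style subclass;
    # the 'block-device-mapping' key is an assumption about the metadata
    # layout, not something this base class guarantees):
    #   def device_name_to_device(self, name):
    #       bdm = self.metadata.get('block-device-mapping', {})
    #       return(bdm.get(name))    # e.g. 'ephemeral0' -> 'sdb'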

    def get_locale(self):
        return('en_US.UTF-8')

    def get_local_mirror(self):
        return('http://archive.ubuntu.com/ubuntu/')

    def get_instance_id(self):
        if 'instance-id' not in self.metadata:
            return("ubuntuhost")
        return(self.metadata['instance-id'])

    def get_hostname(self):
        if 'local-hostname' not in self.metadata:
            return None

        toks = self.metadata['local-hostname'].split('.')
        # if there is an ipv4 address in 'local-hostname', then
        # make up a hostname from it (LP: #475354)
        if len(toks) == 4:
            try:
                r = filter(lambda x: 0 <= int(x) < 256, toks)
                if len(r) == 4:
                    return("ip-%s" % '-'.join(r))
            except ValueError:
                pass
        return toks[0]
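
    # Example (illustrative): a 'local-hostname' of '10.1.2.3' yields
    # 'ip-10-1-2-3', while 'myhost.example.com' yields 'myhost'.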

# return a list of classes that match the given 'depends'
# iterate through cfg_list, loading "DataSourceCollections" modules
# and calling their "get_datasource_list",
# returning an ordered list of the classes that match
#
# - modules must be named "DataSource<item>", where 'item' is an entry
#   in cfg_list
# - if pkglist is given, each package in it is tried in turn
#   ie, pkglist=[ "foo", "" ]
#     will first try to load foo.DataSource<item>
#     then DataSource<item>
def list_sources(cfg_list, depends, pkglist=[]):
    retlist = []
    for ds_coll in cfg_list:
        for pkg in pkglist:
            if pkg:
                pkg = "%s." % pkg
            try:
                mod = __import__("%sDataSource%s" % (pkg, ds_coll))
                if pkg:
                    mod = getattr(mod, "DataSource%s" % ds_coll)
                lister = getattr(mod, "get_datasource_list")
                retlist.extend(lister(depends))
                break
            except ImportError:
                # module not found under this package prefix; fall through
                # and try the next entry in pkglist
                continue
    return(retlist)
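
# Example (hypothetical call, for illustration; the module and package names
# are assumptions about what a deployment might ship):
#   list_sources(["Ec2", "NoCloud"], (DEP_FILESYSTEM, DEP_NETWORK),
#                pkglist=["cloudinit", ""])
# tries cloudinit.DataSourceEc2 then DataSourceEc2 (and likewise for NoCloud),
# collecting whatever each module's get_datasource_list(depends) returns.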

# depends is a list of dependencies (eg: DEP_FILESYSTEM)
# dslist is a list of (class, depends) tuples:
# dslist = [
#   ( class, ( depends-that-this-class-needs ) ),
# ]
# it returns a list of the classes whose deps match 'depends' exactly
# it is a helper function for DataSourceCollections
def list_from_depends(depends, dslist):
    retlist = []
    depset = set(depends)
    for (cls, deps) in dslist:
        if depset == set(deps):
            retlist.append(cls)
    return(retlist)
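
# Example (illustrative sketch of how a DataSourceCollections module might use
# this helper; the class names below are assumptions, not defined in this file):
#   datasources = [
#       (DataSourceEc2, (DEP_FILESYSTEM, DEP_NETWORK)),
#       (DataSourceEc2Local, (DEP_FILESYSTEM,)),
#   ]
#   def get_datasource_list(depends):
#       return(list_from_depends(depends, datasources))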