From 857dab6b5dfc69cf398e6f8f873b4ef472b9e635 Mon Sep 17 00:00:00 2001
From: Scott Moser <smoser@nelson>
Date: Wed, 6 Jan 2010 01:40:29 -0500
Subject: support compressed user data, use cPickle instead of pickle
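
User data handed to an instance may be gzip compressed.  Keep the raw
payload in userdata_raw and only run it through the new
preprocess_user_data()/decomp_str() helpers the first time
get_user_data() is asked for it, so compressed user data is
transparently decompressed.  Move the real get_user_data() into the
DataSource base class (dropping the "hello" stub in DataSourceEc2) and
switch the pickle cache handling over to the faster cPickle module.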

---
 ec2init/DataSource.py    | 20 +++++++++++++++++++-
 ec2init/DataSourceEc2.py | 25 ++++++++++++-------------
 ec2init/__init__.py      | 23 ++++++++++++++++++++---
 3 files changed, 51 insertions(+), 17 deletions(-)
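
A rough sketch of the intended decomp_str() behaviour, assuming the
patch is applied and ec2init is importable; the payload string is only
an example:

    import StringIO
    import gzip

    from ec2init import decomp_str

    plain = "#!/bin/sh\necho hello\n"

    # build a gzip-compressed copy of the same payload
    buf = StringIO.StringIO()
    gz = gzip.GzipFile(None, "wb", 9, buf)
    gz.write(plain)
    gz.close()

    # compressed input comes back decompressed, plain input is untouched
    assert decomp_str(buf.getvalue()) == plain
    assert decomp_str(plain) == plain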

diff --git a/ec2init/DataSource.py b/ec2init/DataSource.py
index da6170fd..b940c564 100644
--- a/ec2init/DataSource.py
+++ b/ec2init/DataSource.py
@@ -1,8 +1,26 @@
 
+import ec2init
+
 class DataSource:
+    userdata = None
+    metadata = None
+    userdata_raw = None
+
     def __init__(self):
        pass
 
+    def store_user_data_raw(self):
+        fp = open(user_data_raw, "wb")
+        fp.write(self.userdata_raw)
+        fp.close()
+
+    def store_user_data(self):
+        fp = open(user_data, "wb")
+        fp.write(self.userdata)
+        fp.close()
+
     def get_user_data(self):
-        raise Exception("get_user_data Not-implemented")
+        if self.userdata is None:
+            self.userdata = ec2init.preprocess_user_data(self.userdata_raw)
 
+        return self.userdata
diff --git a/ec2init/DataSourceEc2.py b/ec2init/DataSourceEc2.py
index c3317272..8ee92d29 100644
--- a/ec2init/DataSourceEc2.py
+++ b/ec2init/DataSourceEc2.py
@@ -5,7 +5,7 @@ import boto.utils
 import socket
 import urllib2
 import time
-import pickle
+import cPickle
 
 class DataSourceEc2(DataSource.DataSource):
     api_ver  = '2009-04-04'
@@ -26,26 +26,25 @@ class DataSourceEc2(DataSource.DataSource):
         self.meta_data_base_url = 'http://169.254.169.254/%s/meta-data' % self.api_ver
         self.user_data_base_url = 'http://169.254.169.254/%s/user-data' % self.api_ver
 
-    def get_user_data(self):
-        return("hello")
-
     def get_data(self):
-        print "checking %s" % self.cachedir + "/user-data.pkl"
-        udf = open(self.cachedir + "/user-data.pkl")
-        self.userdata = pickle.load(udf)
-        udf.close()
+        try:
+            udf = open(self.cachedir + "/user-data.pkl")
+            self.userdata_raw = cPickle.load(udf)
+            udf.close()
 
-        mdf = open(self.cachedir + "/meta-data.pkl")
-        self.metadata = pickle.load(mdf)
-        mdf.close()
+            mdf = open(self.cachedir + "/meta-data.pkl")
+            self.metadata = cPickle.load(mdf)
+            mdf.close()
 
-        return True
+            return True
+        except:
+            pass
 
         try:
             if not self.wait_for_metadata_service():
                 return False
             self.metadata = boto.utils.get_instance_metadata(self.api_ver)
-            self.userdata = boto.utils.get_instance_metadata(api_ver)
+            self.userdata_raw = boto.utils.get_instance_userdata(self.api_ver)
         except Exception as e:
             print e
             return False
diff --git a/ec2init/__init__.py b/ec2init/__init__.py
index 05404c3a..60a87275 100644
--- a/ec2init/__init__.py
+++ b/ec2init/__init__.py
@@ -21,6 +21,7 @@ import os
 from   configobj import ConfigObj
 
 import boto.utils
+import cPickle
 
 datadir = '/var/lib/cloud/data'
 cachedir = datadir + '/cache'
@@ -36,7 +37,7 @@ class EC2Init:
     def restore_from_cache(self):
         try:
             f=open(cachedir + "/obj.pkl", "rb")
-            data = pickle.load(f)
+            data = cPickle.load(f)
             self.datasource = data
             return True
         except:
@@ -45,7 +46,7 @@ class EC2Init:
     def write_to_cache(self):
         try:
             f=open(cachedir + "/obj.pkl", "wb")
-            data = pickle.dump(self.datasource,f)
+            data = cPickle.dump(self.datasource,f)
             return True
         except:
             return False
@@ -60,7 +61,8 @@ class EC2Init:
                 if s.get_data():
                     self.datasource = s
                     return
-            except:
+            except Exception as e:
+                print e
                 pass
         raise Exception("Could not find data source")
 
@@ -77,3 +79,18 @@ class EC2Init:
         import subprocess
         subprocess.Popen(['initctl', 'CFG_FILE=%s' % user_config]).communicate()
 
+
+# if 'str' is compressed return decompressed otherwise return it
+def decomp_str(str):
+    import StringIO
+    import gzip
+    try:
+        uncomp = gzip.GzipFile(None,"rb",1,StringIO.StringIO(str)).read()
+        return(uncomp)
+    except:
+        return(str)
+
+
+# preprocess the user data (include / uncompress)
+def preprocess_user_data(ud):
+    return(decomp_str(ud))