@ -112,6 +112,63 @@ class TestAutoRestoreS3(unittest.TestCase):
os . remove ( compressed_tmp_file )
delete_s3_object ( access_key_id , secret_access_key_id , S3_BUCKET , path )
@unittest.skipUnless(env_present('RQLITE_S3_ACCESS_KEY'), "S3 credentials not available")
def test_skipped_if_data(self):
    '''Test that automatic restores are skipped if the node has data.

    Uploads a known SQLite snapshot to S3, boots a node that creates its own
    table, then restarts a node over the same data directory with auto-restore
    configured. The restore must be skipped: the locally-created table must
    survive, and the table from the S3 snapshot must NOT appear.
    '''
    access_key_id = os.environ['RQLITE_S3_ACCESS_KEY']
    secret_access_key_id = os.environ['RQLITE_S3_SECRET_ACCESS_KEY']

    # Upload a test SQLite file to S3, at a random path so concurrent
    # test runs don't collide.
    tmp_file = self.create_sqlite_file()
    compressed_tmp_file = temp_file()
    gzip_compress(tmp_file, compressed_tmp_file)
    path = "restore/" + random_string(32)
    upload_s3_object(access_key_id, secret_access_key_id, S3_BUCKET, path, compressed_tmp_file)

    # Create the auto-restore config file.
    auto_restore_cfg = {
        "version": 1,
        "type": "s3",
        "sub": {
            "access_key_id": access_key_id,
            "secret_access_key": secret_access_key_id,
            "region": S3_BUCKET_REGION,
            "bucket": S3_BUCKET,
            "path": path
        }
    }
    cfg = write_random_file(json.dumps(auto_restore_cfg))

    # Create a new node, write some data to it.
    n0 = Node(RQLITED_PATH, '0')
    n0.start()
    n0.wait_for_ready()
    n0.execute('CREATE TABLE bar (id INTEGER NOT NULL PRIMARY KEY, name TEXT)')
    n0.stop()

    # Create a new node, using the directory from the previous node, but check
    # that data is not restored from S3, wiping out the existing data.
    n1 = Node(RQLITED_PATH, '0', dir=n0.dir, auto_restore=cfg)
    n1.start()
    n1.wait_for_ready()
    # The locally-created table must still be present...
    j = n1.query('SELECT * FROM bar')
    self.assertEqual(j, d_("{'results': [{'types': ['integer', 'text'], 'columns': ['id', 'name']}]}"))
    # ...and the table from the S3 snapshot must not have been restored.
    j = n1.query('SELECT * FROM foo')
    self.assertEqual(j, d_("{'results': [{'error': 'no such table: foo'}]}"))

    # Clean up local nodes, temp files, and the uploaded S3 object.
    deprovision_node(n0)
    deprovision_node(n1)
    os.remove(cfg)
    os.remove(tmp_file)
    os.remove(compressed_tmp_file)
    delete_s3_object(access_key_id, secret_access_key_id, S3_BUCKET, path)
class TestAutoBackupS3 ( unittest . TestCase ) :
@unittest.skipUnless ( env_present ( ' RQLITE_S3_ACCESS_KEY ' ) , " S3 credentials not available " )
def test_no_compress ( self ) :